From 74e1509e1b41320a364de64f76e3b84c0e5fb4df Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Tue, 24 Jan 2017 14:46:04 -0700 Subject: [PATCH 01/27] updates for CAM-Chem from Simone --- atm_diag/tables_chem.ncl | 100 +++++++++++++++++++++++++++------------ atm_diag/tables_soa.ncl | 42 ++++++++++++++-- 2 files changed, 108 insertions(+), 34 deletions(-) diff --git a/atm_diag/tables_chem.ncl b/atm_diag/tables_chem.ncl index 4377f945..f20ba1b2 100644 --- a/atm_diag/tables_chem.ncl +++ b/atm_diag/tables_chem.ncl @@ -94,8 +94,8 @@ ntime1 = dimsizes(time1) end if aerod1 = new((/ntime1,nlat1,nlon1/),"float") - if (isfilevar(inptr1, "AEROD_v")) - aerod1 = inptr1->AEROD_v + if (isfilevar(inptr1, "AODVIS")) + aerod1 = inptr1->AODVIS else aerod1 = 0. end if @@ -110,7 +110,7 @@ ntime1 = dimsizes(time1) delete(dst3) delete(dst4) delete(dst1) -delete(dst2) + delete(dst2) end if if (isfilevar(inptr1, "AODDUST1").and.isfilevar(inptr1, "AODDUST2").and.isfilevar(inptr1, "AODDUST3")) dst1 = inptr1->AODDUST1 @@ -119,8 +119,27 @@ delete(dst2) dst11 = dst1+dst2+dst3 delete(dst1) delete(dst2) -delete(dst3) + delete(dst3) + else + if (isfilevar(inptr1, "AODDUST")) + dst1 = inptr1->AODDUST + dst11 = dst1 + delete(dst1) + end if end if + dlon1=360./nlon1 + coffa=pi*re^2./180. + area1 = aerod1 + area1 = 1. + area1 = area1 + aerod1*0. + do ii=0,dimsizes(gw1)-1 + aerod1(:,ii,:)=aerod1(:,ii,:)*coffa*dlon1*gw1(ii) + dst11(:,ii,:)=dst11(:,ii,:)*coffa*dlon1*gw1(ii) + area1(:,ii,:)=area1(:,ii,:)*coffa*dlon1*gw1(ii) + end do + taerod1 = sum(aerod1)/sum(area1) + tdst11 = sum(dst11)/sum(area1) + delete(area1) ps1 = inptr1->PS if (isfilevar(inptr1,"O3")) then @@ -189,8 +208,8 @@ ntime2 = dimsizes(time2) cochm2 = 0. end if aerod2 = new((/ntime2,nlat2,nlon2/),"float") - if (isfilevar(inptr2, "AEROD_v")) - aerod2 = inptr2->AEROD_v + if (isfilevar(inptr2, "AODVIS")) + aerod2 = inptr2->AODVIS else aerod2 = 0. 
end if @@ -215,8 +234,28 @@ delete(dst4) dst22 = dst1+dst2+dst3 delete(dst1) delete(dst2) -delete(dst3) + delete(dst3) + else + if (isfilevar(inptr2, "AODDUST")) + dst1 = inptr2->AODDUST + dst22 = dst1 + delete(dst1) + end if end if + dlon2=360./nlon2 + coffa=pi*re^2./180. + area2 = aerod2 + area2 = 1. + area2 = area2 + aerod2*0. + do ii=0,dimsizes(gw2)-1 + aerod2(:,ii,:)=aerod2(:,ii,:)*coffa*dlon2*gw2(ii) + dst22(:,ii,:)=dst22(:,ii,:)*coffa*dlon2*gw2(ii) + area2(:,ii,:)=area2(:,ii,:)*coffa*dlon2*gw2(ii) + end do + taerod2 = sum(aerod2)/sum(area2) + tdst22 = sum(dst22)/sum(area2) + delete(area2) + ps2 = inptr2->PS if (isfilevar(inptr2,"O3")) then @@ -277,21 +316,21 @@ p21 = pres_hybrid_ccm(ps2,p0,ha21,hb21) vint1 = new((/ntime1,nlev1,nlat1,nlon1/),"float") vint1(:,:,:,:) = 0. end if - if (isfilevar(inptr1,vchml)) then - chml1=inptr1->$vchml$ + if (isfilevar(inptr1,vchmll)) then + chml1=inptr1->$vchmll$ else - if (isfilevar(inptr1,vchmll)) then - chml1=inptr1->$vchmll$ + if (isfilevar(inptr1,vchml)) then + chml1=inptr1->$vchml$ else chml1 = new((/ntime1,nlev1,nlat1,nlon1/),"float") chml1(:,:,:,:) = 0. end if end if - if (isfilevar(inptr1,vchmp)) then - chmp1=inptr1->$vchmp$ + if (isfilevar(inptr1,vchmpp)) then + chmp1=inptr1->$vchmpp$ else - if (isfilevar(inptr1,vchmpp)) then - chmp1=inptr1->$vchmpp$ + if (isfilevar(inptr1,vchmp)) then + chmp1=inptr1->$vchmp$ else chmp1 = new((/ntime1,nlev1,nlat1,nlon1/),"float") chmp1(:,:,:,:) = 0. @@ -375,8 +414,6 @@ p21 = pres_hybrid_ccm(ps2,p0,ha21,hb21) tbchml1 = sum(bchml1)*86400.*365.*factor(i) temis1 = sum(emis1)*86400.*365.*1e-9*factor(i) tdf1 = sum(df1)*86400.*365.*1e-9*factor(i) - taerod1 = dim_avg_n(aerod1,(/1,2/)) - tdst11 = dim_avg_n(dst11,(/1,2/)) if vars(i).eq."O3" then if (isfilevar(inptr1,"O3")) then do ii=0,ddd1(2)-1 @@ -413,22 +450,22 @@ p21 = pres_hybrid_ccm(ps2,p0,ha21,hb21) vint2 = new((/ntime2,nlev2,nlat2,nlon2/),"float") vint2(:,:,:,:) = 0. 
end if - if (isfilevar(inptr2,vchml)) then - chml2=inptr2->$vchml$ + if (isfilevar(inptr2,vchmll)) then + chml2=inptr2->$vchmll$ else - if (isfilevar(inptr2,vchmll)) then - chml2=inptr2->$vchmll$ + if (isfilevar(inptr2,vchml)) then + chml2=inptr2->$vchml$ else chml2 = new((/ntime2,nlev2,nlat2,nlon2/),"float") chml2(:,:,:,:) = 0. end if end if ;if i.eq.2 then - if (isfilevar(inptr2,vchmp)) then - chmp2=inptr2->$vchmp$ + if (isfilevar(inptr2,vchmpp)) then + chmp2=inptr2->$vchmpp$ else - if (isfilevar(inptr2,vchmpp)) then - chmp2=inptr2->$vchmpp$ + if (isfilevar(inptr2,vchmp)) then + chmp2=inptr2->$vchmp$ else chmp2 = new((/ntime2,nlev2,nlat2,nlon2/),"float") chmp2(:,:,:,:) = 0. @@ -495,7 +532,6 @@ p21 = pres_hybrid_ccm(ps2,p0,ha21,hb21) end if coo2=new((/ddd2(2)/),float) - dst22 = dim_avg_n(dst22,(/1,2/)) do ii=0, ddd2(2)-1 coo2(ii) = gw2(ii)*(2.*pi/nlon2)*re^2. end do @@ -511,8 +547,6 @@ p21 = pres_hybrid_ccm(ps2,p0,ha21,hb21) tbchml2 = sum(bchml2)*86400.*365.*factor(i) temis2 = sum(emis2)*86400.*365.*1e-9*factor(i) tdf2 = sum(df2)*86400.*365.*1e-9*factor(i) - taerod2 = dim_avg_n(aerod2,(/1,2/)) - tdst22 = dim_avg_n(dst22,(/1,2/)) if (isfilevar(inptr2,"O3")) then if vars(i).eq."O3" then do ii=0,ddd2(2)-1 @@ -636,17 +670,21 @@ end do global((i-1)*5+3,0)= tlno_prod1 if (compare .ne. "OBS") then global((i-1)*5+3,1)= tlno_prod2 - global((i-1)*5+3,2) = global(5*(i-1)+2,1) - global(5*(i-1)+2,0) ; diff + global((i-1)*5+3,2) = global(5*(i-1)+3,1) - global(5*(i-1)+3,0) ; diff end if global((i-1)*5+4,0)= taerod1 if (compare .ne. "OBS") then global((i-1)*5+4,1)= taerod2 - global((i-1)*5+4,2) = global(5*(i-1)+3,1) - global(5*(i-1)+3,0) ; diff + global((i-1)*5+4,2) = global(5*(i-1)+4,1) - global(5*(i-1)+4,0) ; diff + delete(aerod2) + delete(taerod2) end if global((i-1)*5+5,0)= tdst11 if (compare .ne. 
"OBS") then global((i-1)*5+5,1)= tdst22 - global((i-1)*5+5,2) = global(5*(i-1)+4,1) - global(5*(i-1)+4,0) ; diff + global((i-1)*5+5,2) = global(5*(i-1)+5,1) - global(5*(i-1)+5,0) ; diff + delete(dst22) + delete(tdst22) end if ; PRINTT THE TABLES diff --git a/atm_diag/tables_soa.ncl b/atm_diag/tables_soa.ncl index a8e844a4..75d8acdb 100644 --- a/atm_diag/tables_soa.ncl +++ b/atm_diag/tables_soa.ncl @@ -35,7 +35,7 @@ begin factor_mam = (/1.,1.,1.,1.,1.,32.066/115.11/) ; SO4 in TgS ivars = dimsizes(vars) mw_bam = (/12.001,144.132,12.011,12.011,12.011,96.06/) ; chem mw (96.06 is from SO4, 115.11 is from NH4HSO4 (mam) - mw_mam = (/12.001,144.132,12.011,12.011,12.011,115.11/) ; chem mw (96.06 is from SO4, 115.11 is from NH4HSO4 (mam) + mw_mam = (/12.001,250.,12.011,12.011,12.011,115.11/) ; chem mw (96.06 is from SO4, 115.11 is from NH4HSO4 (mam) nvars = 6*dimsizes(vars)+2 ; burden, emis, dry dep, wet dep, chem_loss, lifetime global = new ((/nvars,3/),"float", -999) ; global means,diff vars_all = new((/nvars/),"string") @@ -124,8 +124,10 @@ begin ; and then the difference between ; the fields aer = new(2,"string") +aer(:) = "bam" do i = 0, ivars-1 + print(vars(i)) vint1 = new((/ntime1,nlev1,nlat1,nlon1/),"float") vemis1 = new((/ntime1,nlat1,nlon1/),"float") vgas1 = new((/ntime1,nlat1,nlon1/),"float") @@ -136,11 +138,15 @@ aer = new(2,"string") dwf1 = new((/ntime1,nlat1,nlon1/),"float") vaqu1 = new((/ntime1,nlat1,nlon1/),"float") vnuc1 = new((/ntime1,nlat1,nlon1/),"float") + vcaqa1 = new((/ntime1,nlat1,nlon1/),"float") + vcaqb1 = new((/ntime1,nlat1,nlon1/),"float") if (compare .eq. 
"OBS") then nca = 1 else ; CASE 2 MODEL vint2 = new((/ntime2,nlev2,nlat2,nlon2/),"float") + vcaqa2 = new((/ntime2,nlat2,nlon2/),"float") + vcaqb2 = new((/ntime2,nlat2,nlon2/),"float") vemis2 = new((/ntime2,nlat2,nlon2/),"float") vgas2 = new((/ntime2,nlat2,nlon2/),"float") chmp2 = new((/ntime2,nlev2,nlat2,nlon2/),"float") @@ -201,6 +207,24 @@ aer = new(2,"string") vara = (/"soa_c1","soa_c2"/) aer(ca) = "mam3" end if + if (isfilevar(inptr,"soaff1_a1") .and. isfilevar(inptr,"soaff1_a2")) then + delete(varsoa) + mw(i) = 250. + varsoa = (/"soaff1_a1","soaff1_a2","soaff2_a1","soaff2_a2","soaff3_a1","soaff3_a2","soaff4_a1","soaff4_a2","soaff5_a1","soaff5_a2",\ + "soabb1_a1","soabb1_a2","soabb2_a1","soabb2_a2","soabb3_a1","soabb3_a2","soabb4_a1","soabb4_a2","soabb5_a1","soabb5_a2",\ + "soabg1_a1","soabg1_a2","soabg2_a1","soabg2_a2","soabg3_a1","soabg3_a2","soabg4_a1","soabg4_a2","soabg5_a1","soabg5_a2" /) + vara = (/"soaff1_c1","soaff1_c2","soaff2_c1","soaff2_c2","soaff3_c1","soaff3_c2","soaff4_c1","soaff4_c2","soaff5_c1","soaff5_c2",\ + "soabb1_c1","soabb1_c2","soabb2_c1","soabb2_c2","soabb3_c1","soabb3_c2","soabb4_c1","soabb4_c2","soabb5_c1","soabb5_c2",\ + "soabg1_c1","soabg1_c2","soabg2_c1","soabg2_c2","soabg3_c1","soabg3_c2","soabg4_c1","soabg4_c2","soabg5_c1","soabg5_c2" /) + aer(ca) = "mam3" + end if + if (isfilevar(inptr,"soa1_a1") .and. isfilevar(inptr,"soa1_a2")) then + delete(varsoa) + mw(i) = 250. + varsoa = (/"soa_a1","soa_a2","soa2_a1","soa2_a2","soa3_a1","soa3_a2","soa4_a1","soa4_a2","soa5_a1","soa5_a2"/) + vara = (/"soa1_c1","soa1_c2","soa2_c1","soa2_c2","soa3_c1","soa3_c2","soa4_c1","soa4_c2","soa5_c1","soa5_c2"/) + aer(ca) = "mam3" + end if end if if vars(i).eq."DUST" then if (isfilevar(inptr,"DST01") .and. isfilevar(inptr,"DST02") .and. isfilevar(inptr,"DST03") .and. isfilevar(inptr,"DST04")) then @@ -268,6 +292,13 @@ aer = new(2,"string") vara = (/"pom_c1","pom_c4"/) aer(ca) = "mam3" end if + if (isfilevar(inptr,"pomff1_a1") .and. 
isfilevar(inptr,"pomff1_a4")) then + delete(vara) + delete(varsoa) + varsoa = (/"pomff1_a1","pomff1_a4","pombb1_a1","pombb1_a4"/) + vara = (/"pomff1_c1","pomff1_c4","pombb1_c1","pombb1_c4"/) + aer(ca) = "mam3" + end if end if end if if vars(i).eq."BC" then @@ -547,7 +578,6 @@ aer = new(2,"string") end if - ; CASE 1 MODEL @@ -638,6 +668,7 @@ aer = new(2,"string") if (compare .ne. "OBS") then ; CASE 2 MODEL + vaqu2 = vcaqa2 + vcaqb2 delp2=(/vint2/) ddd2=dimsizes(delp2) @@ -707,7 +738,12 @@ aer = new(2,"string") if (all(ismissing(tbchmp2)) .or. all(ismissing(tddf2)) .or. all(ismissing(tdwf2))) then lifet2=0. else - lifet2 = tbvar2/(tbchmp2+tddf2-tdwf2 + tbaqu2+tbnuc2)*365. + ; lifet2 = tbvar2/(tbchmp2+tddf2-tdwf2 + tbaqu2+tbnuc2)*365. + if (tddf2-tdwf2).ne.0. then + lifet2 = tbvar2/(tddf2-tdwf2)*365 + else + lifet2=0. + end if end if end if From 85536bef784793ffbac7b28d30bd6d86d83810b5 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Tue, 24 Jan 2017 15:55:56 -0700 Subject: [PATCH 02/27] initial port for cheyenne - waiting for python dependencies to be installed --- Machines/cheyenne_modules | 24 ++++++++++++ Machines/machine_postprocess.xml | 67 ++++++++++++++++++++++++++++++-- Templates/batch_cheyenne.tmpl | 50 ++++++++++++++++++++++++ create_python_env | 2 +- 4 files changed, 138 insertions(+), 5 deletions(-) create mode 100755 Machines/cheyenne_modules create mode 100644 Templates/batch_cheyenne.tmpl diff --git a/Machines/cheyenne_modules b/Machines/cheyenne_modules new file mode 100755 index 00000000..1a4ada5c --- /dev/null +++ b/Machines/cheyenne_modules @@ -0,0 +1,24 @@ +#!/bin/sh -x + +echo "Python boot-strap modules for machine cheyenne" +. /glade/u/apps/opt/lmod/lmod/init/bash + +module unload all-python-libs + +# TODO - get the module versions +module load python/2.7.13 +module load numpy/1.11.3 +module load scipy/?? +module load mpi4py/?? +module load pynio/?? +module load matplotlib/?? 
+module load intel/16.0.3 +module load netcdf/4.4.1.1 +module load nco/4.6.2 +module load ncl/6.3.0 +module load netcdf4python/?? + +# prepend the virtualenv into the PATH +PATH=/glade/u/apps/contrib/virtualenv/12.0.7:${PATH} +export PATH + diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 8f0c829c..96c78e74 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -3,6 +3,7 @@ + 32 mpirun.lsf @@ -34,30 +35,88 @@ - 32 + 120 16 6 /glade/p/cesm/amwg/amwg_data - 32 + 120 4 /glade/p/cesm/pcwg/ice/data - 32 + 120 12 6 /glade/p/cesm/lmwg/diag/lnd_diag_data - 32 + 120 16 /glade/p/cesm/ + + + 144 + aprun -n {{ pes }} + + f2py + + ifort + -c -g -O2 + -I/glade/u/apps/opt/netcdf/4.2/intel/12.1.5/include + -L/glade/u/apps/opt/netcdf/4.2/intel/12.1.5/lib -lnetcdff -lnetcdf + + + + module restore system + module load python/2.7.13 + + + module load python/2.7.13 + module load numpy/1.11.3 + module load scipy/?? + module load mpi4py/?? + module load pynio/?? + module load matplotlib/?? + module load nco/4.6.2 + module load ncl/6.3.0 + module load netcdf4python/?? + module load intel/16.0.3 + module use /glade/u/apps/contrib/ncl-nightly/modules + module load ncltest-intel + + + + 120 + 24 + 6 + /glade/p/cesm/amwg/amwg_data + + + 120 + 4 + /glade/p/cesm/pcwg/ice/data + + + 120 + 12 + 6 + /glade/p/cesm/lmwg/diag/lnd_diag_data + + + 120 + 24 + /glade/p/cesm/ + + + + + 144 aprun -n {{ pes }} diff --git a/Templates/batch_cheyenne.tmpl b/Templates/batch_cheyenne.tmpl new file mode 100644 index 00000000..38d29dcf --- /dev/null +++ b/Templates/batch_cheyenne.tmpl @@ -0,0 +1,50 @@ +########## +## +## General rules for determining PE counts and distribution across nodes +## --------------------------------------------------------------------- +## +## Averages: +## +## For avearges, set -N equal to the number of variables to be averaged +## plus the number of averages to be computed. 
The ppn should always +## be set to 15 on cheyenne exclusive nodes. +## +## For ocean hi-resolution or atm data sets with a lot of variables, +## set the netcdf_format XML variable to netcdfLarge, change the queue to +## either geyser (shared) or bigmem (exclusive). For geyser, set -n to 16 +## and ptile to 2 or more. Or, set -n < 16 and ptile to 1 which will +## allow for more memory usage. The -W setting may also need to be +## increased for large data sets. +## +########## +## +## Diagnostics: +## +## For diagnostics, the queue should always be set to geyser or caldera +## with the -n not to exceed the number of plot sets to be created. +## The ptile can be adjusted depending on the size of the input climo +## and average files. +## +########## +## +## Variable Time series generation: +## +## On the cheyenne queues, -N should be set to total number of streams +## to be converted into variable timeseries * 16 minimum tasks per stream +## and ppn = 15. For geyser or caldera, the maximum -n is 16 and the +## ppn can be adjusted based on what the memory requirements might +## be depending on the variable size and number of history time slices +## to be included in the final single variable output file. +## +########## + +#PBS -N {{ processName }} +#PBS -q {{ queue }} +#PBS -l nodes={{ pes }}:ppn={{ ppn }} +#PBS -l walltime={{ wallclock }} + +. /glade/u/apps/opt/lmod/lmod/init/bash + +export MP_TIMEOUT=14400 +export MP_PULSE=1800 +export MP_DEBUG_NOTIMEOUT=yes diff --git a/create_python_env b/create_python_env index 747a45bc..2ae69e5e 100755 --- a/create_python_env +++ b/create_python_env @@ -143,7 +143,7 @@ echo $env if [ -f $env ]; then status="ERROR" info="$progname - ${pp_dir}/cesm-env2 virtual environment already exists. -It is only necessary to create the virtual environment once in the CESM source tree. +It is only necessary to create the virtual environment once for a given machine. 
All post processing scripts residing in a CASE directory will activate and deactivate the virtual environment as necessary. From 33156dbe16288fa2b71d913edaa6ac3560bfe39c Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Wed, 1 Feb 2017 14:57:29 -0700 Subject: [PATCH 03/27] updates for cheyenne port --- Config/config_postprocess.xml | 16 + Config/config_timeseries.xml | 52 +-- Machines/cheyenne_modules | 23 +- Machines/machine_postprocess.xml | 51 ++- Templates/batch_cheyenne.tmpl | 31 +- cesm_utils/cesm_utils/create_f2py_remap | 2 +- create_python_env | 4 +- examples/ASD-Cheyenne-DP/env_postprocess.xml | 153 +++++++ examples/ASD-Cheyenne-DP/env_timeseries.xml | 422 ++++++++++++++++++ examples/ASD-Cheyenne-DP/timeseries | 128 ++++++ ocn_diag/tool_lib/zon_avg/makefile | 4 +- .../timeseries/cesm_tseries_generator.py | 60 +-- 12 files changed, 837 insertions(+), 109 deletions(-) create mode 100644 examples/ASD-Cheyenne-DP/env_postprocess.xml create mode 100644 examples/ASD-Cheyenne-DP/env_timeseries.xml create mode 100755 examples/ASD-Cheyenne-DP/timeseries diff --git a/Config/config_postprocess.xml b/Config/config_postprocess.xml index 524cd247..08bb49bc 100644 --- a/Config/config_postprocess.xml +++ b/Config/config_postprocess.xml @@ -136,6 +136,22 @@ desc="If TRUE, create only complete chunks of variable timeseries data files as determined by the env_timeseries.xml tseries_filecat_tper and tseries_filecat_n elements. If FALSE, then incomplete chunks of variable timeseries data will be created and appended to upon subsequent running of the timeseries script. Default is TRUE." > + + + + - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Machines/cheyenne_modules b/Machines/cheyenne_modules index 1a4ada5c..4ff8af0b 100755 --- a/Machines/cheyenne_modules +++ b/Machines/cheyenne_modules @@ -1,22 +1,23 @@ #!/bin/sh -x echo "Python boot-strap modules for machine cheyenne" -. 
/glade/u/apps/opt/lmod/lmod/init/bash +. /glade/u/apps/ch/opt/lmod/7.2.1/lmod/lmod/init/bash -module unload all-python-libs - -# TODO - get the module versions +## Order is important! +module restore system module load python/2.7.13 -module load numpy/1.11.3 -module load scipy/?? -module load mpi4py/?? -module load pynio/?? -module load matplotlib/?? -module load intel/16.0.3 +module load numpy/1.12.0 +module load scipy/0.18.1 +module load impi/5.1.3.210 +module load mpi4py/2.0.0-impi module load netcdf/4.4.1.1 +module load pynio/1.4.1 +module load matplotlib/2.0.0 +module load intel/16.0.3 module load nco/4.6.2 module load ncl/6.3.0 -module load netcdf4python/?? +module load netcdf4-python/1.2.7 +module load cf_units/1.1.3 # prepend the virtualenv into the PATH PATH=/glade/u/apps/contrib/virtualenv/12.0.7:${PATH} diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 96c78e74..4ef66d5e 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -30,6 +30,7 @@ module load netcdf/4.3.0 module load nco/4.4.4 module load netcdf4python/1.1.1 + module load cf_units/1.1 module use /glade/apps/contrib/ncl-nightly/modules module load ncltest-intel @@ -59,17 +60,17 @@ - - 144 - aprun -n {{ pes }} + 72 + mpirun -n {{ pes }} - f2py + + f2py ifort -c -g -O2 - -I/glade/u/apps/opt/netcdf/4.2/intel/12.1.5/include - -L/glade/u/apps/opt/netcdf/4.2/intel/12.1.5/lib -lnetcdff -lnetcdf + -I/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/16.0.3/include + -L/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/16.0.3/lib -lnetcdff -lnetcdf @@ -78,39 +79,41 @@ module load python/2.7.13 - module load numpy/1.11.3 - module load scipy/?? - module load mpi4py/?? - module load pynio/?? - module load matplotlib/?? - module load nco/4.6.2 - module load ncl/6.3.0 - module load netcdf4python/?? 
+ module load numpy/1.12.0 + module load scipy/0.18.1 + module load impi/5.1.3.210 + module load mpi4py/2.0.0-impi + module load pynio/1.4.1 + module load matplotlib/2.0.0 module load intel/16.0.3 + module load netcdf/4.4.1.1 + module load nco/4.6.2 + module load netcdf4-python/1.2.7 + module load cf_units/1.1.3 module use /glade/u/apps/contrib/ncl-nightly/modules module load ncltest-intel - 120 - 24 - 6 + 72 + 36 + 6 /glade/p/cesm/amwg/amwg_data - 120 - 4 + 72 + 4 /glade/p/cesm/pcwg/ice/data - 120 - 12 - 6 + 72 + 12 + 6 /glade/p/cesm/lmwg/diag/lnd_diag_data - 120 - 24 + 72 + 24 /glade/p/cesm/ diff --git a/Templates/batch_cheyenne.tmpl b/Templates/batch_cheyenne.tmpl index 38d29dcf..3b5868ac 100644 --- a/Templates/batch_cheyenne.tmpl +++ b/Templates/batch_cheyenne.tmpl @@ -5,9 +5,7 @@ ## ## Averages: ## -## For avearges, set -N equal to the number of variables to be averaged -## plus the number of averages to be computed. The ppn should always -## be set to 15 on cheyenne exclusive nodes. +## TODO - add some hints for cheyenne queues ## ## For ocean hi-resolution or atm data sets with a lot of variables, ## set the netcdf_format XML variable to netcdfLarge, change the queue to @@ -20,31 +18,30 @@ ## ## Diagnostics: ## -## For diagnostics, the queue should always be set to geyser or caldera -## with the -n not to exceed the number of plot sets to be created. +## TODO - add some hints for cheyenne queues +## NOTE - geyser and caldera aren't accessible from cheyenne yet as of 1/31/2017. +## +## For diagnostics, the queue should always be set to small, geyser or caldera +## with the number of mpi tasks not to exceed the number of plot sets to be created. ## The ptile can be adjusted depending on the size of the input climo -## and average files. +## and average files. 
## ########## ## ## Variable Time series generation: ## -## On the cheyenne queues, -N should be set to total number of streams -## to be converted into variable timeseries * 16 minimum tasks per stream -## and ppn = 15. For geyser or caldera, the maximum -n is 16 and the -## ppn can be adjusted based on what the memory requirements might -## be depending on the variable size and number of history time slices -## to be included in the final single variable output file. +## TODO - add some hints for cheyenne queues +## +## Load balance depends on number of history streams, +## variables per stream and chunk sizes. ## ########## #PBS -N {{ processName }} #PBS -q {{ queue }} -#PBS -l nodes={{ pes }}:ppn={{ ppn }} +#PBS -l select=2:ncpus={{ ppn }}:mpiprocs={{ pes }} #PBS -l walltime={{ wallclock }} +#PBS -A {{ project }} -. /glade/u/apps/opt/lmod/lmod/init/bash +. /glade/u/apps/ch/opt/lmod/7.2.1/lmod/lmod/init/bash -export MP_TIMEOUT=14400 -export MP_PULSE=1800 -export MP_DEBUG_NOTIMEOUT=yes diff --git a/cesm_utils/cesm_utils/create_f2py_remap b/cesm_utils/cesm_utils/create_f2py_remap index fb8098d6..42b6203c 100755 --- a/cesm_utils/cesm_utils/create_f2py_remap +++ b/cesm_utils/cesm_utils/create_f2py_remap @@ -153,7 +153,7 @@ def main(options): # call the f2py command as a subprocess try: subprocess.check_call(f2pyCall) - except subprocess.calledProcessError as e: + except subprocess.CalledProcessError as e: print('ERROR: create_f2py_remap subprocess call to {1} failed with error:'.format(e.cmd)) print(' {0} - {1}'.format(e.returncode, e.output)) sys.exit(1) diff --git a/create_python_env b/create_python_env index 2ae69e5e..73853d86 100755 --- a/create_python_env +++ b/create_python_env @@ -212,7 +212,7 @@ echo "$progname - Compiling ocn diagnostics remap.so" # reads XML and call subprocess f2py create_f2py_remap --machine $machine if [ $? 
-ne 0 ]; then - echo "WARNING: Problem the ocean diagnostics create_f2py_remap in $pp_dir" + echo "WARNING: Problem with ocean diagnostics create_f2py_remap in $pp_dir" fi #---------------------------------------------------------------------- @@ -264,4 +264,4 @@ using the create_postprocess --caseroot [caseroot] script. These tools include: **********************************************************************************************" print_result $status "$info" -exit 0 \ No newline at end of file +exit 0 diff --git a/examples/ASD-Cheyenne-DP/env_postprocess.xml b/examples/ASD-Cheyenne-DP/env_postprocess.xml new file mode 100644 index 00000000..117146ea --- /dev/null +++ b/examples/ASD-Cheyenne-DP/env_postprocess.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/ASD-Cheyenne-DP/env_timeseries.xml b/examples/ASD-Cheyenne-DP/env_timeseries.xml new file mode 100644 index 00000000..9d6870a2 --- /dev/null +++ b/examples/ASD-Cheyenne-DP/env_timeseries.xml @@ -0,0 +1,422 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + atm + True + noleap + + + hist + TRUE + netcdf4c + month_1 + years + 100 + + + hist + TRUE + netcdf4c + day_1 + years + 100 + + + hist + TRUE + netcdf4c + hour_6 + years + 100 + + + hist + FALSE + netcdf4c + hour_3 + years + 1 + + + hist + FALSE + netcdf4c + hour_1 + years + 1 + + + hist + FALSE + netcdf4c + min_30 + years + 1 + + + hist + FALSE + netcdf4c + undefined + years + 10 + + + hist + FALSE + netcdf4c + undefined + years + 10 + + + hist + FALSE + netcdf4c + undefined + years + 10 + + + + ch4vmr + co2vmr + date + date_written + datesec + f11vmr + f12vmr + n2ovmr + ndcur + nscur + nsteph + sol_tsi + time + time_bnds + time_written 
+ + + + + lnd + True + noleap + + + hist + True + netcdf4c + month_1 + years + 100 + + + hist + True + netcdf4c + day_1 + years + 100 + + + hist + FALSE + netcdf4c + hour_6 + years + 1 + + + hist + FALSE + netcdf4c + hour_3 + years + 1 + + + hist + FALSE + netcdf4c + hour_1 + years + 1 + + + hist + FALSE + netcdf4c + min_30 + years + 1 + + + + date_written + mcdate + mcsec + mdcur + mscur + nstep + time + time_bounds + time_written + + + + + rof + True + noleap + + + hist + True + netcdf4c + month_1 + years + 100 + + + hist + True + netcdf4c + day_1 + years + 100 + + + hist + FALSE + netcdf4c + hour_6 + years + 1 + + + hist + FALSE + netcdf4c + hour_3 + years + 1 + + + + date_written + mcdate + mcsec + mdcur + mscur + nstep + time + time_bounds + time_written + + + + + rof + True + noleap + + + hist + FALSE + netcdf4c + month_1 + years + 10 + + + hist + FALSE + netcdf4c + day_1 + years + 5 + + + hist + FALSE + netcdf4c + hour_6 + years + 1 + + + hist + FALSE + netcdf4c + hour_3 + years + 1 + + + + date_written + mcdate + mcsec + mdcur + mscur + nstep + time + time_bounds + time_written + + + + + ice + True + noleap + + + hist + True + netcdf4c + month_1 + years + 100 + + + hist + TRUE + netcdf4c + day_1 + years + 100 + + + + time + time_bounds + + + + + + ocn + True + noleap + + + hist + TRUE + netcdf4c + month_1 + years + 100 + + + hist + TRUE + netcdf4c + day_5 + years + 100 + + + hist + FALSE + netcdf4c + year_1 + years + 10 + + + hist + TRUE + netcdf4c + month_1 + years + 100 + + + hist + TRUE + netcdf4c + year_1 + years + 100 + + + hist + TRUE + netcdf4c + day_5 + years + 100 + + + + time + time_bound + + + + + glc + True + noleap + + + hist + FALSE + netcdf4c + year_1 + years + 10 + + + + + + wav + True + noleap + + + hist + FALSE + netcdf4c + month_1 + years + 10 + + + + + + diff --git a/examples/ASD-Cheyenne-DP/timeseries b/examples/ASD-Cheyenne-DP/timeseries new file mode 100755 index 00000000..72433322 --- /dev/null +++ 
b/examples/ASD-Cheyenne-DP/timeseries @@ -0,0 +1,128 @@ +#! /bin/bash +##! /usr/bin/env bash +# +# template to activate the virtualenv, call post process program, deactivate virtualenv +# + +########## +## +## General rules for determining PE counts and distribution across nodes +## --------------------------------------------------------------------- +## +## Averages: +## +## For avearges, set -N equal to the number of variables to be averaged +## plus the number of averages to be computed. The ppn should always +## be set to 15 on cheyenne exclusive nodes. +## +## For ocean hi-resolution or atm data sets with a lot of variables, +## set the netcdf_format XML variable to netcdfLarge, change the queue to +## either geyser (shared) or bigmem (exclusive). For geyser, set -n to 16 +## and ptile to 2 or more. Or, set -n < 16 and ptile to 1 which will +## allow for more memory usage. The -W setting may also need to be +## increased for large data sets. +## +########## +## +## Diagnostics: +## +## For diagnostics, the queue should always be set to geyser or caldera +## with the -n not to exceed the number of plot sets to be created. +## The ptile can be adjusted depending on the size of the input climo +## and average files. +## +########## +## +## Variable Time series generation: +## +## On the cheyenne queues, -N should be set to total number of streams +## to be converted into variable timeseries * 16 minimum tasks per stream +## and ppn = 15. For geyser or caldera, the maximum -n is 16 and the +## ppn can be adjusted based on what the memory requirements might +## be depending on the variable size and number of history time slices +## to be included in the final single variable output file. +## +########## + +#PBS -N timeseries +#PBS -q regular +#PBS -l select=2:ncpus=36:mpiprocs=36 +#PBS -l walltime=04:00:00 +#PBS -A acgd0004 +#PBS -o timeseries.stdout +#PBS -e timeseries.stderr +#PBS -m abe +#PBS -M aliceb@ucar.edu + +echo "before init bash" +. 
/glade/u/apps/ch/opt/lmod/7.2.1/lmod/lmod/init/bash + +export MP_TIMEOUT=14400 +export MP_PULSE=1800 +export MP_DEBUG_NOTIMEOUT=yes + +if [ ! -e /glade/p/work/aliceb/sandboxes/dev/postprocessing_ch/cesm-env2/bin ]; then + echo "*************************************************************************************" + echo "CESM timeseries exiting due to non-existant python virtual environment in" + echo " /glade/p/work/aliceb/sandboxes/dev/postprocessing_ch/cesm-env2/bin" + echo "You must first run:" + echo "$SRCROOT/postprocessing/create_python_env.sh -machine [machine] -cimeroot [$CIMEROOT]" + echo "*************************************************************************************" + exit +fi + + +module restore system + +module load python/2.7.13 + +echo "before activate" +cd /glade/p/work/aliceb/sandboxes/dev/postprocessing_ch/cesm-env2/bin +pwd +. activate + +echo "before module loads" + +module load python/2.7.13 + +module load numpy/1.12.0 + +module load scipy/0.18.1 + +module load impi/5.1.3.210 + +module load mpi4py/2.0.0-impi + +module load pynio/1.4.1 + +module load matplotlib/2.0.0 + +module load intel/16.0.3 + +module load netcdf/4.4.1.1 + +module load nco/4.6.2 + +module load netcdf4-python/1.2.7 + +module load cf_units/1.1.3 + +module use /glade/u/apps/contrib/ncl-nightly/modules + +module load ncltest-intel + +echo "******************************************" +echo "Start timeseries generation $(date)" +echo "******************************************" + +echo "before mpirun" + +mpirun -n 72 ./cesm_tseries_generator.py --debug 2 --backtrace --caseroot /glade/p/work/aliceb/sandboxes/runs/b.e11.BDP.f09_g16.1980-11.011_pp --standalone >> /glade/p/work/aliceb/sandboxes/runs/b.e11.BDP.f09_g16.1980-11.011_pp/logs/timeseries.log 2>&1 + + +echo "******************************************" +echo "End timeseries generation $(date)" +echo "******************************************" + +deactivate +cd 
/glade/p/work/aliceb/sandboxes/runs/b.e11.BDP.f09_g16.1980-11.011_pp diff --git a/ocn_diag/tool_lib/zon_avg/makefile b/ocn_diag/tool_lib/zon_avg/makefile index 589182d6..44140c84 100644 --- a/ocn_diag/tool_lib/zon_avg/makefile +++ b/ocn_diag/tool_lib/zon_avg/makefile @@ -4,8 +4,8 @@ FC = ifort FFLAGS = -c -g -O2 -INCLUDE = -I/glade/apps/opt/netcdf/4.2/intel/12.1.5/include -LIBS = -L/glade/apps/opt/netcdf/4.2/intel/12.1.5/lib -lnetcdff -lnetcdf +INCLUDE = -I/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/16.0.3/include +LIBS = -L/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/16.0.3/lib -lnetcdff -lnetcdf .SUFFIXES: # Delete the default suffixes .SUFFIXES: .F .F90 .o # Define our suffix list diff --git a/timeseries/timeseries/cesm_tseries_generator.py b/timeseries/timeseries/cesm_tseries_generator.py index 6d53782c..03276258 100755 --- a/timeseries/timeseries/cesm_tseries_generator.py +++ b/timeseries/timeseries/cesm_tseries_generator.py @@ -76,13 +76,14 @@ def commandline_options(): #============================================================================================== # readArchiveXML - read the $CASEROOT/env_timeseries.xml file and build the pyReshaper classes #============================================================================================== -def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, debug, debugMsg): +def readArchiveXML(caseroot, input_rootdir, output_rootdir, casename, standalone, completechunk, debug, debugMsg): """ reads the $CASEROOT/env_timeseries.xml file and builds a fully defined list of reshaper specifications to be passed to the pyReshaper tool. 
Arguments: caseroot (string) - case root path - dout_s_root (string) - short term archive root path + input_rootdir (string) - rootdir to input raw history files + output_rootdir (string) - rootdir to output single variable time series files casename (string) - casename standalone (boolean) - logical to indicate if postprocessing case is stand-alone or not completechunk (boolean) - end on a ragid boundary if True. Otherwise, do not create incomplete chunks if False @@ -145,7 +146,7 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d # get a list of all the input files for this stream from the archive location history_files = list() - in_file_path = '/'.join( [dout_s_root,rootdir,subdir] ) + in_file_path = '/'.join( [input_rootdir,rootdir,subdir] ) # get XML tseries elements for chunking if file_spec.find("tseries_tper") is not None: @@ -166,7 +167,7 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d # the tseries_tper should be set in using the time_period_freq global file attribute if it exists if time_period_freq is not None: tseries_tper = time_period_freq - tseries_output_dir = '/'.join( [dout_s_root, rootdir, 'proc/tseries', tseries_tper] ) + tseries_output_dir = '/'.join( [output_rootdir, rootdir, 'proc/tseries', tseries_tper] ) debugMsg("tseries_output_dir = {0}".format(tseries_output_dir), header=True) if not os.path.exists(tseries_output_dir): @@ -196,6 +197,8 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d if "year" in tseries_tper: tseries_output_suffix = "."+start_time_parts[0]+"-"+last_time_parts[0]+".nc" elif "month" in tseries_tper: + tseries_output_suffix = "."+start_time_parts[0]+start_time_parts[1]+"-"+last_time_parts[0]+last_time_parts[1]+".nc" + elif "day" in tseries_tper: tseries_output_suffix = "."+start_time_parts[0]+start_time_parts[1]+start_time_parts[2]+"-"+last_time_parts[0]+last_time_parts[1]+last_time_parts[2]+".nc" elif any(freq_string 
in tseries_tper for freq_string in freq_array): tseries_output_suffix = "."+start_time_parts[0]+start_time_parts[1]+start_time_parts[2]+start_time_parts[3]+"-"+last_time_parts[0]+last_time_parts[1]+last_time_parts[2]+last_time_parts[3]+".nc" @@ -225,7 +228,6 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d specifiers.append(spec) return specifiers,log - def divide_comm(scomm, l_spec): ''' @@ -243,33 +245,32 @@ def divide_comm(scomm, l_spec): ''' min_procs_per_spec = 16 size = scomm.get_size() - rank = scomm.get_rank()-1 + rank = scomm.get_rank() + + if l_spec == 1: + num_of_groups = 1 + else: + num_of_groups = size/min_procs_per_spec + if l_spec < num_of_groups: + num_of_groups = l_spec + # the global master needs to be in its own subcommunicator # ideally it would not be in any, but the divide function # requires all ranks to participate in the call - if rank == -1: - group = ((size/min_procs_per_spec)%l_spec)+1 - if l_spec == 1: - num_of_groups = 1 - else: - num_of_groups = (size/min_procs_per_spec) + if rank == 0: + temp_color = 0 else: - temp_color = (rank // min_procs_per_spec) % l_spec - if l_spec == 1: - num_of_groups = 1 - else: - num_of_groups = (size/min_procs_per_spec) - if (temp_color == num_of_groups): - temp_color = temp_color - 1 - groups = [] - for g in range(0,num_of_groups+1): - groups.append(g) - group = groups[temp_color] - + temp_color = (rank % num_of_groups)+1 + groups = [] + for g in range(0,num_of_groups+1): + groups.append(g) + group = groups[temp_color] + inter_comm,multi_comm = scomm.divide(group) return inter_comm,num_of_groups + #====== # main #====== @@ -292,19 +293,22 @@ def main(options, scomm, rank, size, debug, debugMsg): # loading the specifiers from the env_timeseries.xml only needs to run on the master task (rank=0) if rank == 0: - dout_s_root = cesmEnv['DOUT_S_ROOT'] + tseries_input_rootdir = cesmEnv['TIMESERIES_INPUT_ROOTDIR'] + tseries_output_rootdir = cesmEnv['TIMESERIES_OUTPUT_ROOTDIR'] 
case = cesmEnv['CASE'] completechunk = cesmEnv['TIMESERIES_COMPLETECHUNK'] if completechunk.upper() in ['T','TRUE']: completechunk = 1 else: completechunk = 0 - specifiers,log = readArchiveXML(caseroot, dout_s_root, case, options.standalone, - completechunk, debug, debugMsg) + specifiers,log = readArchiveXML(caseroot, tseries_input_rootdir, tseries_output_rootdir, + case, options.standalone, completechunk, debug, debugMsg) scomm.sync() # specifiers is a list of pyreshaper specification objects ready to pass to the reshaper specifiers = scomm.partition(specifiers, func=partition.Duplicate(), involved=True) + if rank == 0: + debugMsg("# of Specifiers: "+str(len(specifiers)), header=True, verbosity=1) if len(specifiers) > 0: # setup subcommunicators to do streams and chunks in parallel @@ -336,6 +340,7 @@ def main(options, scomm, rank, size, debug, debugMsg): reshpr = reshaper.create_reshaper(specifiers[i], serial=False, verbosity=debug, simplecomm=inter_comm) # Run the conversion (slice-to-series) process reshpr.convert() + inter_comm.sync() # all subcomm ranks - recv the specifier to work on and call the reshaper else: @@ -347,6 +352,7 @@ def main(options, scomm, rank, size, debug, debugMsg): reshpr = reshaper.create_reshaper(specifiers[i], serial=False, verbosity=debug, simplecomm=inter_comm) # Run the conversion (slice-to-series) process reshpr.convert() + inter_comm.sync() if rank == 0: # Update system log with the dates that were just converted From 839ffa2a00b6a34cb5e939af6145fba8951d6a33 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Fri, 3 Feb 2017 10:47:45 -0700 Subject: [PATCH 04/27] update for pe counts on cheyenne --- Machines/cheyenne_modules | 2 +- Machines/machine_postprocess.xml | 26 ++++++++++++------------ Machines/machine_postprocess.xsd | 8 ++++++++ Templates/batch_cheyenne.tmpl | 2 +- cesm_utils/cesm_utils/cesmEnvLib.py | 6 +++--- cesm_utils/cesm_utils/create_f2py_remap | 13 +++++++----- cesm_utils/cesm_utils/create_postprocess | 26 
+++++++++++++++++------- 7 files changed, 53 insertions(+), 30 deletions(-) diff --git a/Machines/cheyenne_modules b/Machines/cheyenne_modules index 4ff8af0b..af4b0c83 100755 --- a/Machines/cheyenne_modules +++ b/Machines/cheyenne_modules @@ -20,6 +20,6 @@ module load netcdf4-python/1.2.7 module load cf_units/1.1.3 # prepend the virtualenv into the PATH -PATH=/glade/u/apps/contrib/virtualenv/12.0.7:${PATH} +PATH=/glade/u/apps/ch/opt/python/2.7.13/gnu/6.2.0/bin:${PATH} export PATH diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 4ef66d5e..1f31b166 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -61,11 +61,11 @@ - 72 + 502 mpirun -n {{ pes }} - - f2py + f2py ifort -c -g -O2 @@ -95,25 +95,25 @@ - 72 - 36 - 6 + 72 + 36 + 6 /glade/p/cesm/amwg/amwg_data - 72 - 4 + 72 + 4 /glade/p/cesm/pcwg/ice/data - 72 - 12 - 6 + 72 + 12 + 6 /glade/p/cesm/lmwg/diag/lnd_diag_data - 72 - 24 + 72 + 24 /glade/p/cesm/ diff --git a/Machines/machine_postprocess.xsd b/Machines/machine_postprocess.xsd index 13672435..40e0bf2b 100644 --- a/Machines/machine_postprocess.xsd +++ b/Machines/machine_postprocess.xsd @@ -5,10 +5,12 @@ + + @@ -27,6 +29,7 @@ + @@ -39,6 +42,7 @@ + @@ -51,6 +55,7 @@ + @@ -63,6 +68,7 @@ + @@ -76,6 +82,7 @@ + @@ -113,6 +120,7 @@ + diff --git a/Templates/batch_cheyenne.tmpl b/Templates/batch_cheyenne.tmpl index 3b5868ac..eb9fc9b3 100644 --- a/Templates/batch_cheyenne.tmpl +++ b/Templates/batch_cheyenne.tmpl @@ -39,7 +39,7 @@ #PBS -N {{ processName }} #PBS -q {{ queue }} -#PBS -l select=2:ncpus={{ ppn }}:mpiprocs={{ pes }} +#PBS -l select={{ nodes }}:ncpus={{ ppn }}:mpiprocs={{ ppn }} #PBS -l walltime={{ wallclock }} #PBS -A {{ project }} diff --git a/cesm_utils/cesm_utils/cesmEnvLib.py b/cesm_utils/cesm_utils/cesmEnvLib.py index 98e47baf..c2d7f039 100755 --- a/cesm_utils/cesm_utils/cesmEnvLib.py +++ b/cesm_utils/cesm_utils/cesmEnvLib.py @@ -19,7 +19,7 @@ try: import lxml.etree as etree except: - 
import xml.etree.ElementTree as ET + import xml.etree.ElementTree as etree re_val = re.compile(r'\$(\{([A-Za-z0-9_]+)\}|[A-Za-z0-9_]+)') @@ -58,7 +58,7 @@ def readXML(casedir, env_file_list): for efile in env_file_list: env_file = '{0}/{1}'.format(casedir, efile) if os.path.isfile(env_file): - xml_tree = ET.ElementTree() + xml_tree = etree.ElementTree() xml_tree.parse(env_file) for entry_tag in xml_tree.findall('entry'): output[entry_tag.get('id')] = entry_tag.get('value') @@ -220,7 +220,7 @@ def get_machine_name(hostname, xmlFile): machine = None rc, err_msg = checkFile(xmlFile, 'read') if rc: - xml_tree = ET.ElementTree() + xml_tree = etree.ElementTree() xml_tree.parse(xmlFile) # find the matching machine name diff --git a/cesm_utils/cesm_utils/create_f2py_remap b/cesm_utils/cesm_utils/create_f2py_remap index 42b6203c..0f11d295 100755 --- a/cesm_utils/cesm_utils/create_f2py_remap +++ b/cesm_utils/cesm_utils/create_f2py_remap @@ -47,8 +47,6 @@ if sys.version_info[0] == 2: from ConfigParser import SafeConfigParser as config_parser else: from configparser import ConfigParser as config_parser - -# STARTHERE to get the machine from the arglist... 
# # import modules installed in the virtual environment # @@ -112,7 +110,7 @@ def read_machine_xml(machineName, xmlFile): xmlDict["f2py"] = f2py.text xmlDict["fcompiler"] = f2py.get("fcompiler") xmlDict["f77exec"] = f2py.get("f77exec") - + xmlDict["f90exec"] = f2py.get("f90exec") if not found: err_msg = ('create_f2py_remap ERROR: Invalid machine "{0}" requested for postprocessing'.format(machineName)) raise OSError(err_msg) @@ -141,8 +139,13 @@ def main(options): os.chdir('{0}/ocn_diag/eco_lib'.format(ppdir)) # construct the f2py call list - f2pyCall = ['{0}'.format(f2pyDict['f2py']), '-c', '--fcompiler={0}'.format(f2pyDict['fcompiler']), - '--f77exec={0}'.format(f2pyDict['f77exec']),'-m', 'remap', 'remap.f'] + if len(f2pyDict["f90exec"]) > 0: + f2pyCall = ['{0}'.format(f2pyDict['f2py']), '-c', '--fcompiler={0}'.format(f2pyDict['fcompiler']), + '--f77exec={0}'.format(f2pyDict['f77exec']), '--f90exec={0}'.format(f2pyDict['f90exec']), + '-m', 'remap', 'remap.f'] + else: + f2pyCall = ['{0}'.format(f2pyDict['f2py']), '-c', '--fcompiler={0}'.format(f2pyDict['fcompiler']), + '--f77exec={0}'.format(f2pyDict['f77exec']),'-m', 'remap', 'remap.f'] # remove the remap.so if it already exists try: diff --git a/cesm_utils/cesm_utils/create_postprocess b/cesm_utils/cesm_utils/create_postprocess index 3d2a5d5e..4644a549 100755 --- a/cesm_utils/cesm_utils/create_postprocess +++ b/cesm_utils/cesm_utils/create_postprocess @@ -278,8 +278,10 @@ def read_machine_xml(machineName, xmlFile): machine['timeseries_pes'] = tseries_pes.text machine['timeseries_queue'] = tseries_pes.get('queue').lower() machine['timeseries_ppn'] = tseries_pes.get('pes_per_node').lower() + machine['timeseries_nodes'] = tseries_pes.get('nodes').lower() machine['timeseries_wallclock'] = tseries_pes.get('wallclock').lower() + # get the mpi run command machine['mpi_command'] = xmlmachine.find('mpi_command').text @@ -309,12 +311,14 @@ def read_machine_xml(machineName, xmlFile): avg = comp.find('averages_pes') 
machine['{0}_averages_pes'.format(compName)] = avg.text machine['{0}_averages_queue'.format(compName)] = avg.get('queue').lower() + machine['{0}_averages_nodes'.format(compName)] = avg.get('nodes').lower() machine['{0}_averages_ppn'.format(compName)] = avg.get('pes_per_node').lower() machine['{0}_averages_wallclock'.format(compName)] = avg.get('wallclock').lower() diags = comp.find('diagnostics_pes') machine['{0}_diagnostics_pes'.format(compName)] = diags.text machine['{0}_diagnostics_queue'.format(compName)] = diags.get('queue').lower() + machine['{0}_diagnostics_nodes'.format(compName)] = diags.get('nodes').lower() machine['{0}_diagnostics_ppn'.format(compName)] = diags.get('pes_per_node').lower() machine['{0}_diagnostics_wallclock'.format(compName)] = diags.get('wallclock').lower() @@ -322,6 +326,7 @@ def read_machine_xml(machineName, xmlFile): if regrid is not None: machine['{0}_regrid_pes'.format(compName)] = regrid.text machine['{0}_regrid_queue'.format(compName)] = regrid.get('queue').lower() + machine['{0}_regrid_nodes'.format(compName)] = regrid.get('nodes').lower() machine['{0}_regrid_ppn'.format(compName)] = regrid.get('pes_per_node').lower() machine['{0}_regrid_wallclock'.format(compName)] = regrid.get('wallclock').lower() @@ -337,7 +342,8 @@ def read_machine_xml(machineName, xmlFile): # create_batch - create the batch files for post processing submission # ------------------------------------------------------------------------------- def create_batch(ppDir, pes, batchTmpl, runTmpl, postProcessCmd, mpiCmd, outFile, processName, - project, pythonpath, caseRoot, reset_modules, modules, queue, ppn, wallclock, options, standalone): + project, pythonpath, caseRoot, reset_modules, modules, queue, ppn, nodes, + wallclock, options, standalone): """create the batch submission files for post processing Arguments: @@ -354,6 +360,9 @@ def create_batch(ppDir, pes, batchTmpl, runTmpl, postProcessCmd, mpiCmd, outFile reset_modules (string) - module commands modules 
(string) - module commands queue (string) - queue for batch submission + ppn (string) - pes-per-node + nodes (string) - number of nodes + wallclock (string) - wallclock time options (object) - commandline options standalone (boolean) - indicate if this is postprocessing for a standalone case """ @@ -391,6 +400,7 @@ def create_batch(ppDir, pes, batchTmpl, runTmpl, postProcessCmd, mpiCmd, outFile template = templateEnv.get_template( batchTmpl ) templateVars = { 'pes' : pes, 'queue' : queue, + 'nodes' : nodes, 'processName' : processName, 'project' : project, 'ppn' : ppn, @@ -405,6 +415,7 @@ def create_batch(ppDir, pes, batchTmpl, runTmpl, postProcessCmd, mpiCmd, outFile 'reset_modules' : reset_modules, 'modules' : modules, 'pes' : pes, + 'nodes' : nodes, 'mpirun' : mpiCmd, 'pythonpath': pythonpath, 'processName' : processName, @@ -582,8 +593,8 @@ def main(options): create_batch(envDict['POSTPROCESS_PATH'], machine['timeseries_pes'], batch_tmpl, run_tmpl, postProcessCmd, machine['mpi_command'], outFile, processName, machine['project'], machine['pythonpath'], envDict['PP_CASE_PATH'], machine['reset_modules'], machine['modules'], - machine['timeseries_queue'], machine['timeseries_ppn'], machine['timeseries_wallclock'], - options, standalone) + machine['timeseries_queue'], machine['timeseries_ppn'], machine['timeseries_nodes'], + machine['timeseries_wallclock'], options, standalone) # generate the diagnostics batch submit scripts from template files for comp in compList: @@ -595,8 +606,8 @@ def main(options): run_tmpl, postProcessCmd, machine['mpi_command'], outFile, processName, machine['project'], machine['pythonpath'], envDict['PP_CASE_PATH'], machine['reset_modules'], machine['modules'], machine['{0}_averages_queue'.format(comp)], - machine['{0}_averages_ppn'.format(comp)], machine['{0}_averages_wallclock'.format(comp)], - options, standalone) + machine['{0}_averages_ppn'.format(comp)], machine['{0}_averages_nodes'.format(comp)], + 
machine['{0}_averages_wallclock'.format(comp)], options, standalone) # generate the diagnostics batch submit script postProcessCmd = '{0}_diags_generator.py'.format(comp) @@ -608,6 +619,7 @@ def main(options): machine['reset_modules'], machine['modules'], machine['{0}_diagnostics_queue'.format(comp)], machine['{0}_diagnostics_ppn'.format(comp)], + machine['{0}_diagnostics_nodes'.format(comp)], machine['{0}_diagnostics_wallclock'.format(comp)], options, standalone) # generate the regrid batch submit scripts from template files @@ -620,8 +632,8 @@ def main(options): run_tmpl, postProcessCmd, machine['mpi_command'], outFile, processName, machine['project'], machine['pythonpath'], envDict['PP_CASE_PATH'], machine['reset_modules'], machine['modules'], machine['{0}_regrid_queue'.format(comp)], - machine['{0}_regrid_ppn'.format(comp)], machine['{0}_regrid_wallclock'.format(comp)], - options, standalone) + machine['{0}_regrid_ppn'.format(comp)], machine['{0}_regrid_nodes'.format(comp)], + machine['{0}_regrid_wallclock'.format(comp)], options, standalone) # copy some tools to the PP_CASE_PATH files_to_copy = ['copy_html', 'pp_config', 'create_env_script', 'env_file.xsd'] From 43675bdac09044e89fd08d6dc9815005d5ea32aa Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Fri, 3 Feb 2017 14:09:32 -0700 Subject: [PATCH 05/27] move PROJECT from XML to an optional argument to create_postprocess. 
--- Machines/machine_postprocess.xml | 3 --- Machines/machine_postprocess.xsd | 2 -- cesm_utils/cesm_utils/create_postprocess | 21 ++++++++++++--------- 3 files changed, 12 insertions(+), 14 deletions(-) diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 1f31b166..331e80cd 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -14,7 +14,6 @@ -I/glade/apps/opt/netcdf/4.2/intel/12.1.5/include -L/glade/apps/opt/netcdf/4.2/intel/12.1.5/lib -lnetcdff -lnetcdf - module restore system module load python/2.7.7 @@ -72,7 +71,6 @@ -I/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/16.0.3/include -L/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/16.0.3/lib -lnetcdff -lnetcdf - module restore system module load python/2.7.13 @@ -131,7 +129,6 @@ -I-I/opt/cray/netcdf/4.3.2/INTEL/140/include -L/opt/cray/netcdf/4.3.2/INTEL/140/lib -lnetcdff -L/opt/cray/hdf5/1.8.13/INTEL/140/lib -lnetcdf - module restore system module load python/2.7.7 diff --git a/Machines/machine_postprocess.xsd b/Machines/machine_postprocess.xsd index 40e0bf2b..24b12c97 100644 --- a/Machines/machine_postprocess.xsd +++ b/Machines/machine_postprocess.xsd @@ -16,7 +16,6 @@ - @@ -143,7 +142,6 @@ - diff --git a/cesm_utils/cesm_utils/create_postprocess b/cesm_utils/cesm_utils/create_postprocess index 4644a549..69ed5fb9 100755 --- a/cesm_utils/cesm_utils/create_postprocess +++ b/cesm_utils/cesm_utils/create_postprocess @@ -135,6 +135,9 @@ def commandline_options(): parser.add_argument('-cesmtag', '--cesmtag', nargs=1, required=False, help='CESM repository tag (optional)') + parser.add_argument('-project', '--project', nargs=1, required=False, + help='Project code (optional). 
This setting will override the environment variable setting.') + options = parser.parse_args() return options @@ -290,11 +293,6 @@ def read_machine_xml(machineName, xmlFile): if machine['pythonpath'] is None: machine['pythonpath'] = '' - # get the project from the XML first then override with env setting - machine['project'] = xmlmachine.find('project').text - if os.getenv('PROJECT') is not None: - machine['project'] = os.getenv('PROJECT') - # loop through the reset_module list for mod in xmlmachine.findall("reset_modules/module"): machine['reset_modules'].append(mod.text) @@ -490,6 +488,11 @@ def initialize_main(envDict, options, standalone): if options.cesmtag: envDict['CESM_TAG'] = options.cesmtag[0] + # set the project code + envDict['PROJECT'] = os.getenv('PROJECT') + if options.project: + envDict['PROJECT'] = options.project[0] + return envDict # ------------------------------------------------------------------------------- @@ -591,7 +594,7 @@ def main(options): outFile = '{0}/{1}'.format(envDict['PP_CASE_PATH'],processName) create_batch(envDict['POSTPROCESS_PATH'], machine['timeseries_pes'], batch_tmpl, run_tmpl, postProcessCmd, - machine['mpi_command'], outFile, processName, machine['project'], machine['pythonpath'], + machine['mpi_command'], outFile, processName, envDict['PROJECT'], machine['pythonpath'], envDict['PP_CASE_PATH'], machine['reset_modules'], machine['modules'], machine['timeseries_queue'], machine['timeseries_ppn'], machine['timeseries_nodes'], machine['timeseries_wallclock'], options, standalone) @@ -604,7 +607,7 @@ def main(options): outFile = '{0}/{1}'.format(envDict['PP_CASE_PATH'], processName) create_batch(envDict['POSTPROCESS_PATH'], machine['{0}_averages_pes'.format(comp)], batch_tmpl, run_tmpl, postProcessCmd, machine['mpi_command'], outFile, processName, - machine['project'], machine['pythonpath'], envDict['PP_CASE_PATH'], + envDict['PROJECT'], machine['pythonpath'], envDict['PP_CASE_PATH'], machine['reset_modules'], 
machine['modules'], machine['{0}_averages_queue'.format(comp)], machine['{0}_averages_ppn'.format(comp)], machine['{0}_averages_nodes'.format(comp)], machine['{0}_averages_wallclock'.format(comp)], options, standalone) @@ -615,7 +618,7 @@ def main(options): outFile = '{0}/{1}'.format(envDict['PP_CASE_PATH'], processName) create_batch(envDict['POSTPROCESS_PATH'], machine['{0}_diagnostics_pes'.format(comp)], batch_tmpl, run_tmpl, postProcessCmd, machine['mpi_command'], outFile, processName, - machine['project'], machine['pythonpath'], envDict['PP_CASE_PATH'], + envDict['PROJECT'], machine['pythonpath'], envDict['PP_CASE_PATH'], machine['reset_modules'], machine['modules'], machine['{0}_diagnostics_queue'.format(comp)], machine['{0}_diagnostics_ppn'.format(comp)], @@ -630,7 +633,7 @@ def main(options): outFile = '{0}/{1}'.format(envDict['PP_CASE_PATH'], processName) create_batch(envDict['POSTPROCESS_PATH'], machine['{0}_regrid_pes'.format(comp)], batch_tmpl, run_tmpl, postProcessCmd, machine['mpi_command'], outFile, processName, - machine['project'], machine['pythonpath'], envDict['PP_CASE_PATH'], + envDict['PROJECT'], machine['pythonpath'], envDict['PP_CASE_PATH'], machine['reset_modules'], machine['modules'], machine['{0}_regrid_queue'.format(comp)], machine['{0}_regrid_ppn'.format(comp)], machine['{0}_regrid_nodes'.format(comp)], machine['{0}_regrid_wallclock'.format(comp)], options, standalone) From d90eff74399dad3260396b391410fdec4083a7b0 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Mon, 6 Feb 2017 17:13:33 -0700 Subject: [PATCH 06/27] fixes for cheyenne PE settings to be consistent with cylc settings for ASD runs --- Config/config_postprocess.xml | 18 +++++++++--------- Machines/machine_postprocess.xml | 19 +++++++++---------- Templates/batch_cheyenne.tmpl | 1 + 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/Config/config_postprocess.xml b/Config/config_postprocess.xml index 08bb49bc..83b29adf 100644 --- a/Config/config_postprocess.xml +++ 
b/Config/config_postprocess.xml @@ -131,7 +131,7 @@ @@ -155,7 +155,7 @@ @@ -163,7 +163,7 @@ @@ -171,7 +171,7 @@ @@ -179,7 +179,7 @@ @@ -187,7 +187,7 @@ @@ -203,7 +203,7 @@ @@ -211,7 +211,7 @@ @@ -219,7 +219,7 @@ diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 331e80cd..1745ff89 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -47,7 +47,7 @@ 120 - 12 + 12 6 /glade/p/cesm/lmwg/diag/lnd_diag_data @@ -60,7 +60,7 @@ - 502 + 72 mpirun -n {{ pes }} module load nco/4.6.2 module load netcdf4-python/1.2.7 module load cf_units/1.1.3 - module use /glade/u/apps/contrib/ncl-nightly/modules - module load ncltest-intel + module load ncl/6.3.0 72 - 36 - 6 + 36 + 6 /glade/p/cesm/amwg/amwg_data 72 - 4 + 4 /glade/p/cesm/pcwg/ice/data 72 - 12 - 6 + 12 + 6 /glade/p/cesm/lmwg/diag/lnd_diag_data 72 - 24 + 24 /glade/p/cesm/ diff --git a/Templates/batch_cheyenne.tmpl b/Templates/batch_cheyenne.tmpl index eb9fc9b3..57f8bef6 100644 --- a/Templates/batch_cheyenne.tmpl +++ b/Templates/batch_cheyenne.tmpl @@ -45,3 +45,4 @@ . 
/glade/u/apps/ch/opt/lmod/7.2.1/lmod/lmod/init/bash +export I_MPI_DEVICE=rdma From b88c01291dc9ff1d087a8bb36e04afea6c1a97af Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Mon, 6 Feb 2017 20:05:27 -0700 Subject: [PATCH 07/27] minor update for f2py compile on yellowstone --- cesm_utils/cesm_utils/create_f2py_remap | 4 +++- ocn_diag/tool_lib/zon_avg/makefile | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/cesm_utils/cesm_utils/create_f2py_remap b/cesm_utils/cesm_utils/create_f2py_remap index 0f11d295..b353cafa 100755 --- a/cesm_utils/cesm_utils/create_f2py_remap +++ b/cesm_utils/cesm_utils/create_f2py_remap @@ -110,7 +110,9 @@ def read_machine_xml(machineName, xmlFile): xmlDict["f2py"] = f2py.text xmlDict["fcompiler"] = f2py.get("fcompiler") xmlDict["f77exec"] = f2py.get("f77exec") - xmlDict["f90exec"] = f2py.get("f90exec") + xmlDict["f90exec"] = '' + if 'f90exec' in f2py.attrib: + xmlDict["f90exec"] = f2py.get("f90exec") if not found: err_msg = ('create_f2py_remap ERROR: Invalid machine "{0}" requested for postprocessing'.format(machineName)) raise OSError(err_msg) diff --git a/ocn_diag/tool_lib/zon_avg/makefile b/ocn_diag/tool_lib/zon_avg/makefile index 44140c84..589182d6 100644 --- a/ocn_diag/tool_lib/zon_avg/makefile +++ b/ocn_diag/tool_lib/zon_avg/makefile @@ -4,8 +4,8 @@ FC = ifort FFLAGS = -c -g -O2 -INCLUDE = -I/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/16.0.3/include -LIBS = -L/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/16.0.3/lib -lnetcdff -lnetcdf +INCLUDE = -I/glade/apps/opt/netcdf/4.2/intel/12.1.5/include +LIBS = -L/glade/apps/opt/netcdf/4.2/intel/12.1.5/lib -lnetcdff -lnetcdf .SUFFIXES: # Delete the default suffixes .SUFFIXES: .F .F90 .o # Define our suffix list From bf1b11b5f48b0908a13870aeb96a22342fb38e4f Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Tue, 7 Feb 2017 09:26:29 -0700 Subject: [PATCH 08/27] fix for optional nodes attribute in XML elements --- cesm_utils/cesm_utils/create_postprocess | 16 ++++++++++++---- 1 file 
changed, 12 insertions(+), 4 deletions(-) diff --git a/cesm_utils/cesm_utils/create_postprocess b/cesm_utils/cesm_utils/create_postprocess index 69ed5fb9..74608dc3 100755 --- a/cesm_utils/cesm_utils/create_postprocess +++ b/cesm_utils/cesm_utils/create_postprocess @@ -281,7 +281,9 @@ def read_machine_xml(machineName, xmlFile): machine['timeseries_pes'] = tseries_pes.text machine['timeseries_queue'] = tseries_pes.get('queue').lower() machine['timeseries_ppn'] = tseries_pes.get('pes_per_node').lower() - machine['timeseries_nodes'] = tseries_pes.get('nodes').lower() + machine['timeseries_nodes'] = '' + if 'nodes' in tseries_pes.attrib: + machine['timeseries_nodes'] = tseries_pes.get('nodes').lower() machine['timeseries_wallclock'] = tseries_pes.get('wallclock').lower() @@ -309,14 +311,18 @@ def read_machine_xml(machineName, xmlFile): avg = comp.find('averages_pes') machine['{0}_averages_pes'.format(compName)] = avg.text machine['{0}_averages_queue'.format(compName)] = avg.get('queue').lower() - machine['{0}_averages_nodes'.format(compName)] = avg.get('nodes').lower() + machine['{0}_averages_nodes'.format(compName)] = '' + if 'nodes' in avg.attrib: + machine['{0}_averages_nodes'.format(compName)] = avg.get('nodes').lower() machine['{0}_averages_ppn'.format(compName)] = avg.get('pes_per_node').lower() machine['{0}_averages_wallclock'.format(compName)] = avg.get('wallclock').lower() diags = comp.find('diagnostics_pes') machine['{0}_diagnostics_pes'.format(compName)] = diags.text machine['{0}_diagnostics_queue'.format(compName)] = diags.get('queue').lower() - machine['{0}_diagnostics_nodes'.format(compName)] = diags.get('nodes').lower() + machine['{0}_diagnostics_nodes'.format(compName)] = '' + if 'nodes' in diags.attrib: + machine['{0}_diagnostics_nodes'.format(compName)] = diags.get('nodes').lower() machine['{0}_diagnostics_ppn'.format(compName)] = diags.get('pes_per_node').lower() machine['{0}_diagnostics_wallclock'.format(compName)] = diags.get('wallclock').lower() @@ 
-324,7 +330,9 @@ def read_machine_xml(machineName, xmlFile): if regrid is not None: machine['{0}_regrid_pes'.format(compName)] = regrid.text machine['{0}_regrid_queue'.format(compName)] = regrid.get('queue').lower() - machine['{0}_regrid_nodes'.format(compName)] = regrid.get('nodes').lower() + machine['{0}_regrid_nodes'.format(compName)] = '' + if 'nodes' in regrid.attrib: + machine['{0}_regrid_nodes'.format(compName)] = regrid.get('nodes').lower() machine['{0}_regrid_ppn'.format(compName)] = regrid.get('pes_per_node').lower() machine['{0}_regrid_wallclock'.format(compName)] = regrid.get('wallclock').lower() From b36978d0679d1b077a760efa2dabb93ef48b66b5 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Fri, 24 Feb 2017 11:53:10 -0700 Subject: [PATCH 09/27] update for cf_units on yellowstone to include in PYTHONPATH rather than as a module load which is not longer necessary --- Machines/machine_postprocess.xml | 3 +-- Tools/ration_example.py | 16 ++++++++++++++-- Tools/ration_script | 6 +++++- 3 files changed, 20 insertions(+), 5 deletions(-) diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 1745ff89..3674bff2 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -6,7 +6,7 @@ 32 mpirun.lsf - + /glade/apps/opt/python/2.7.7/gnu-westmere/4.8.2/lib/python2.7/site-packages f2py ifort @@ -29,7 +29,6 @@ module load netcdf/4.3.0 module load nco/4.4.4 module load netcdf4python/1.1.1 - module load cf_units/1.1 module use /glade/apps/contrib/ncl-nightly/modules module load ncltest-intel diff --git a/Tools/ration_example.py b/Tools/ration_example.py index f474b495..649c599d 100755 --- a/Tools/ration_example.py +++ b/Tools/ration_example.py @@ -1,8 +1,18 @@ #!/usr/bin/env python +import sys -from asaptools import simplecomm +try: + from asaptools import simplecomm +except: + print 'asaptools not loaded. exiting...' 
+ sys.exit(1) + +try: + scomm = simplecomm.create_comm(serial=False) +except: + print 'unable to create scomm. exiting...' + sys.exit(1) -scomm = simplecomm.create_comm() rank = scomm.get_rank() size = scomm.get_size() @@ -30,3 +40,5 @@ scomm.sync() if scomm.is_manager(): print 'Done.' + +sys.exit(0) diff --git a/Tools/ration_script b/Tools/ration_script index 895c3a04..c82eb077 100755 --- a/Tools/ration_script +++ b/Tools/ration_script @@ -8,7 +8,7 @@ #BSUB -q geyser #BSUB -N #BSUB -a poe -#BSUB -J CESM_postprocessing +#BSUB -J ration_script #BSUB -W 00:02 #BSUB -P P93300606 @@ -16,8 +16,12 @@ export MP_LABELIO=yes +module load python/2.7.7 + . /glade/p/work/aliceb/sandboxes/dev/postprocessing/cesm-env2/bin/activate +module load mpi4py/2.0.0 + mpirun.lsf ./ration_example.py >> ./ration.log deactivate From 65079cfeab4e12100e8ec6cb074ba48ae4de58bd Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Thu, 9 Mar 2017 20:38:30 -0700 Subject: [PATCH 10/27] update to pp_config to support query of batch info for cylc --- Templates/cylc_batch_cheyenne.tmpl | 5 + Templates/cylc_batch_edison.tmpl | 7 ++ Templates/cylc_batch_yellowstone.tmpl | 8 ++ Tools/pp_config | 128 ++++++++++++++++++++++++-- Tools/ration_script | 7 ++ 5 files changed, 148 insertions(+), 7 deletions(-) create mode 100644 Templates/cylc_batch_cheyenne.tmpl create mode 100644 Templates/cylc_batch_edison.tmpl create mode 100644 Templates/cylc_batch_yellowstone.tmpl diff --git a/Templates/cylc_batch_cheyenne.tmpl b/Templates/cylc_batch_cheyenne.tmpl new file mode 100644 index 00000000..dd9750d0 --- /dev/null +++ b/Templates/cylc_batch_cheyenne.tmpl @@ -0,0 +1,5 @@ +-N {{ processName }} +-q {{ queue }} +-l select={{ nodes }}:ncpus={{ ppn }}:mpiprocs={{ ppn }} +-l walltime={{ wallclock }} +-A {{ project }} diff --git a/Templates/cylc_batch_edison.tmpl b/Templates/cylc_batch_edison.tmpl new file mode 100644 index 00000000..21e051ac --- /dev/null +++ b/Templates/cylc_batch_edison.tmpl @@ -0,0 +1,7 @@ +-N {{ 
processName }} +-q {{ queue }} +-l nodes={{ pes }}:ppn={{ ppn }} +-l walltime={{ wallclock }} +-o {{ processName }}.stdout +-e {{ processName }}.stderr + diff --git a/Templates/cylc_batch_yellowstone.tmpl b/Templates/cylc_batch_yellowstone.tmpl new file mode 100644 index 00000000..47eec4ae --- /dev/null +++ b/Templates/cylc_batch_yellowstone.tmpl @@ -0,0 +1,8 @@ +-n {{ pes }} +-R "span[ptile={{ ppn }}]" +-q {{ queue }} +-N +-a poe +-J {{ processName }} +-W {{ wallclock }} +-P {{ project }} diff --git a/Tools/pp_config b/Tools/pp_config index 864703e8..b2a941d3 100755 --- a/Tools/pp_config +++ b/Tools/pp_config @@ -89,7 +89,12 @@ else: # from cesm_utils import cesmEnvLib, processXmlLib from asaptools import vprinter +import jinja2 +# global variables +_scripts = ['timeseries','averages','regrid','diagnostics'] +_machines = ['yellowstone','cheyenne','edison'] +_comps = ['atm','ice','lnd','ocn'] # ------------------------------------------------------------------------------- # User input @@ -126,16 +131,112 @@ def commandline_options(): help=('print only the value of the variable.' 
'Works in conjunction with the --get option')) + parser.add_argument('--getbatch', nargs=1, required=False, choices=_scripts, + help='batch script option.') + + parser.add_argument('-comp', '--comp', nargs=1, required=False, choices=_comps, + help='batch script option.') + + parser.add_argument('-machine', '--machine', nargs=1, required=False, choices=_machines, + help='machine name used in conjunction with --getbatch option.') + options = parser.parse_args() return options +# ------------------------------------------------------------------------------- +# get_batch +# ------------------------------------------------------------------------------- +def get_batch(pp_path, script, mach, comp, project, debugMsg): + """ + print the batch directives only for given script and machine + """ + found = False + mach_dict = dict() + xml_file = os.path.join(pp_path, 'Machines', 'machine_postprocess.xml') + xml_tree = etree.ElementTree() + xml_tree.parse(xml_file) + for xmlmachine in xml_tree.findall('machine'): + if mach.lower() == xmlmachine.get('name').lower(): + if script == 'timeseries': + tseries_pes = xmlmachine.find('timeseries_pes') + pes = tseries_pes.text + queue = tseries_pes.get('queue').lower() + ppn = tseries_pes.get('pes_per_node').lower() + wallclock = tseries_pes.get('wallclock').lower() + nodes = '' + if 'nodes' in tseries_pes.attrib: + nodes = tseries_pes.get('nodes').lower() + found = True + elif script in ['averages','regrid','diagnostics'] and len(comp) > 0: + for comp_xml in xmlmachine.findall("components/component"): + compName = comp_xml.get("name").lower() + + if script == 'averages' and compName == comp: + avg = comp_xml.find('averages_pes') + pes = avg.text + queue = avg.get('queue').lower() + ppn = avg.get('pes_per_node').lower() + wallclock = avg.get('wallclock').lower() + nodes = '' + if 'nodes' in avg.attrib: + nodes = avg.get('nodes').lower() + found = True + + if script == 'diagnostics' and compName == comp: + diags = 
comp_xml.find('diagnostics_pes') + pes = diags.text + queue = diags.get('queue').lower() + ppn = diags.get('pes_per_node').lower() + wallclock = diags.get('wallclock').lower() + nodes = '' + if 'nodes' in diags.attrib: + nodes = diags.get('nodes').lower() + found = True + + if script == 'regrid' and compName == comp: + regrid = comp_xml.find('regrid_pes') + if regrid is not None: + pes = regrid.text + queue = regrid.get('queue').lower() + ppn = regrid.get('pes_per_node').lower() + wallclock = regrid.get('wallclock').lower() + nodes = '' + if 'nodes' in regrid.attrib: + nodes = regrid.get('nodes').lower() + found = True + + # load up the template and print it out + if found: + batchTmpl = 'cylc_batch_{0}.tmpl'.format(mach) + templateLoader = jinja2.FileSystemLoader( searchpath='{0}/Templates'.format(pp_path) ) + templateEnv = jinja2.Environment( loader=templateLoader ) + template = templateEnv.get_template( batchTmpl ) + templateVars = { 'pes' : pes, + 'queue' : queue, + 'nodes' : nodes, + 'processName' : script, + 'project' : project, + 'ppn' : ppn, + 'wallclock' : wallclock } + + # render this template into the batchdirectives string + batchdirectives = template.render( templateVars ) + + # print the batchdirectives + print (batchdirectives) + else: + msg = "pp_config INFO: no matching XML records found for " \ + "comp='{0}', script='{1}' on machine='{2}' in XML file='{3}'".format(comp, script, mach, xml_file) + print (msg) + + return 0 + # ------------------------------------------------------------------------------- # main # ------------------------------------------------------------------------------- def main(options): - debugMsg = vprinter.VPrinter(header='', verbosity=0) if options.debug: header = 'pp_config: DEBUG... 
' @@ -144,7 +245,7 @@ def main(options): case_dir = options.caseroot[0] debugMsg("Using case directory : {0}".format(case_dir), header=True, verbosity=1) os.chdir(case_dir) - + xml_filenames = glob.glob('*.xml') xml_trees = [] @@ -164,9 +265,7 @@ def main(options): for tree in xml_trees: xml_processor.xml_to_dict(tree, envDict) - # # 'get' user input - # if options.get: entry_id = options.get[0] if options.value: @@ -174,9 +273,7 @@ def main(options): else: print("{0}={1}".format(entry_id, envDict[entry_id])) - # # 'set' user input - # if options.set: key_value = options.set[0].split('=') new_entry_id = key_value[0].strip() @@ -190,8 +287,25 @@ def main(options): xml_processor.write(envDict, comp.lower(), new_entry_id, new_entry_value) -if __name__ == "__main__": + # print out the batch information + if options.getbatch: + script = options.getbatch[0] + if not options.machine: + print("Option --getbatch requires a matching --machine option be specified") + return 1 + mach = options.machine[0] + comp = '' + if script != 'timeseries': + if not options.comp: + print("Option --getbatch with script not set to 'timeseries' requires --comp option be spcified") + return 1 + comp = options.comp[0] + get_batch(envDict['POSTPROCESS_PATH'], script, mach, comp, envDict['PROJECT'], debugMsg) + + return 0 +# ------------------------------------------------------------------------------- +if __name__ == "__main__": try: options = commandline_options() diff --git a/Tools/ration_script b/Tools/ration_script index c82eb077..1e38b2b2 100755 --- a/Tools/ration_script +++ b/Tools/ration_script @@ -24,6 +24,13 @@ module load mpi4py/2.0.0 mpirun.lsf ./ration_example.py >> ./ration.log +status=$? +echo $status + deactivate +echo $status + + + From 4f11aa98dacb843d143b842627e541f8778380d3 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Fri, 10 Mar 2017 13:00:04 -0700 Subject: [PATCH 11/27] update NCL version on yellowstone and add some changes for cam-chem plots. 
--- Machines/machine_postprocess.xml | 3 +-- Machines/yellowstone_modules | 2 +- atm_diag/profiles_aircraft_noaa.ncl | 4 ++-- diagnostics/diagnostics/atm/Plots/cset7.py | 2 +- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 3674bff2..4d32506b 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -29,8 +29,7 @@ module load netcdf/4.3.0 module load nco/4.4.4 module load netcdf4python/1.1.1 - module use /glade/apps/contrib/ncl-nightly/modules - module load ncltest-intel + module load ncl/6.4.0 diff --git a/Machines/yellowstone_modules b/Machines/yellowstone_modules index 75218ac1..5e373619 100755 --- a/Machines/yellowstone_modules +++ b/Machines/yellowstone_modules @@ -14,7 +14,7 @@ module load matplotlib/1.4.3 module load intel/12.1.5 module load netcdf/4.3.0 module load nco/4.4.4 -module load ncl/6.3.0 +module load ncl/6.4.0 module load netcdf4python/1.1.1 # prepend the virtualenv into the PATH diff --git a/atm_diag/profiles_aircraft_noaa.ncl b/atm_diag/profiles_aircraft_noaa.ncl index c7021697..6c197c86 100644 --- a/atm_diag/profiles_aircraft_noaa.ncl +++ b/atm_diag/profiles_aircraft_noaa.ncl @@ -215,8 +215,8 @@ do re = 0, nregions-1 ; aircraft(re1(re)) ; open obs data file if (vars(v).ne. "OH" .and. vars(v).ne."SAD_TROP" .and. 
vars(v).ne."CLOUD") then - dir_in = "/glade/p/acd/tilmes/amwg/amwg_diag_20131004_work/obs_data/aircraft/" - ; dir_in = "$OBS_DATA/cam-chem/aircraft/NOAA/" +;; dir_in = "/glade/p/acd/tilmes/amwg/amwg_diag_20131004_work/obs_data/aircraft/" + dir_in = "$OBS_DATA/cam-chem/aircraft/NOAA/" rin1 = addfile(dir_in+variablen(v)+aircraft(re1(re))+".nc","r") air1_med = rin1->prof_med air1_p25 = rin1->prof_p25 diff --git a/diagnostics/diagnostics/atm/Plots/cset7.py b/diagnostics/diagnostics/atm/Plots/cset7.py index 21ed2708..f1676805 100644 --- a/diagnostics/diagnostics/atm/Plots/cset7.py +++ b/diagnostics/diagnostics/atm/Plots/cset7.py @@ -38,7 +38,7 @@ def __init__(self,seas,env): self._name = 'CSET 7 - Surface comparison ozone, co, improve' self._shortname = 'CSET7' self._template_file = 'cset7.tmpl' - self.ncl_scripts = ['plot_improve_scatter_pdf_mam3.ncl'] + self.ncl_scripts = ['plot_improve_scatter_pdf.ncl'] self.plot_env = env.copy() def check_prerequisites(self, env): From 3e9561179de59ec35bccb374eab1456964f02ac7 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Sun, 12 Mar 2017 21:23:56 -0600 Subject: [PATCH 12/27] update to make sure cset1 tables get moved correctly --- diagnostics/diagnostics/atm/atm_diags_bc.py | 5 +++++ diagnostics/diagnostics/atm/model_vs_model.py | 2 +- diagnostics/diagnostics/atm/model_vs_obs.py | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/diagnostics/diagnostics/atm/atm_diags_bc.py b/diagnostics/diagnostics/atm/atm_diags_bc.py index 15d4a7c7..cf6f8c9e 100755 --- a/diagnostics/diagnostics/atm/atm_diags_bc.py +++ b/diagnostics/diagnostics/atm/atm_diags_bc.py @@ -72,6 +72,11 @@ def setup_workdir(self, env, t, scomm): if not rc1: os.symlink(climo_file,new_fn) + if (scomm.is_manager()): + print("DEBUG atm_diags_bc: workdir = %s"%workdir) + print("DEBUG atm_diags_bc: t = %s"%t) + print("DEBUG atm_diags_bc: env[t_path_climo] = %s"%env[t+'_path_climo']) + return env def check_prerequisites(self, env, scomm): diff --git 
a/diagnostics/diagnostics/atm/model_vs_model.py b/diagnostics/diagnostics/atm/model_vs_model.py index ebde2473..f5dec22a 100644 --- a/diagnostics/diagnostics/atm/model_vs_model.py +++ b/diagnostics/diagnostics/atm/model_vs_model.py @@ -234,7 +234,7 @@ def run_diagnostics(self, env, scomm): if 'set_5' == plot_set or 'set_6' == plot_set: glob_set = plot_set.replace('_','') plot_set = 'set5_6' - elif 'set_1' == plot_set: + elif 'set_1' == plot_set or 'cset_1' == plot_set: glob_set = 'table_' plot_set = plot_set.replace('_','') elif 'sets' == plot_set: diff --git a/diagnostics/diagnostics/atm/model_vs_obs.py b/diagnostics/diagnostics/atm/model_vs_obs.py index 67335909..109a21a0 100644 --- a/diagnostics/diagnostics/atm/model_vs_obs.py +++ b/diagnostics/diagnostics/atm/model_vs_obs.py @@ -227,7 +227,7 @@ def run_diagnostics(self, env, scomm): if 'set_5' == plot_set or 'set_6' == plot_set: glob_set = plot_set.replace('_','') plot_set = 'set5_6' - elif 'set_1' == plot_set: + elif 'set_1' == plot_set or 'cset_1' == plot_set: glob_set = 'table_' plot_set = plot_set.replace('_','') elif 'sets' == plot_set: From aead1224dcb68338f768b57f2ee157a7b419d9ac Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Mon, 13 Mar 2017 17:45:59 -0600 Subject: [PATCH 13/27] fix for CISM timeseries; update for gx1v7 ocn diags - need to test; update for cam diags cset1 and set1 tables - need to test --- Config/config_timeseries.xml | 3 + Tools/ration.log | 64 ++++++++++ atm_diag/plot_improve_scatter_pdf.ncl | 119 ++++++++++++------ atm_diag/profiles_aircraft_noaa.ncl | 2 +- diagnostics/diagnostics/atm/model_vs_model.py | 1 + diagnostics/diagnostics/atm/model_vs_obs.py | 1 + .../ocn/Plots/zonal_average_3d_fields.py | 1 + ocn_diag/ncl_lib/field_3d_za.ncl | 18 ++- ocn_diag/ncl_lib/field_3d_za_diff.ncl | 15 ++- .../timeseries/cesm_tseries_generator.py | 2 +- timeseries/timeseries/chunking.py | 40 +++++- 11 files changed, 215 insertions(+), 51 deletions(-) create mode 100644 Tools/ration.log diff 
--git a/Config/config_timeseries.xml b/Config/config_timeseries.xml index 7116c085..a9decef5 100644 --- a/Config/config_timeseries.xml +++ b/Config/config_timeseries.xml @@ -391,6 +391,9 @@ 10 + + time + diff --git a/Tools/ration.log b/Tools/ration.log new file mode 100644 index 00000000..247a75d7 --- /dev/null +++ b/Tools/ration.log @@ -0,0 +1,64 @@ +Execute poe command line: poe ./ration_example.py +0:0/4: Sent 0 +1:1/4: Recvd 0 +1:1/4: Recvd 1 +0:0/4: Sent 1 +0:0/4: Sent 2 +1:1/4: Recvd 2 +0:0/4: Sent 3 +1:1/4: Recvd 3 +0:0/4: Sent 4 +1:1/4: Recvd 4 +0:0/4: Sent 5 +1:1/4: Recvd 5 +0:0/4: Sent 6 +1:1/4: Recvd 6 +0:0/4: Sent 7 +1:1/4: Recvd 7 +0:0/4: Sent 8 +1:1/4: Recvd 8 +0:0/4: Sent 9 +1:1/4: Recvd 9 +0:0/4: Sent None +1:1/4: Recvd None +1:1/4: Out of loop +0:0/4: Sent None +0:0/4: Sent None +0:0/4: Out of loop +2:2/4: Recvd None +2:2/4: Out of loop +3:3/4: Recvd None +3:3/4: Out of loop +0:Done. +Execute poe command line: poe ./ration_example.py +0:0/4: Sent 0 +1:1/4: Recvd 0 +1:1/4: Recvd 1 +0:0/4: Sent 1 +0:0/4: Sent 2 +1:1/4: Recvd 2 +0:0/4: Sent 3 +1:1/4: Recvd 3 +0:0/4: Sent 4 +1:1/4: Recvd 4 +0:0/4: Sent 5 +0:0/4: Sent 6 +1:1/4: Recvd 6 +0:0/4: Sent 7 +2:2/4: Recvd 5 +0:0/4: Sent 8 +1:1/4: Recvd 8 +3:3/4: Recvd 7 +0:0/4: Sent 9 +2:2/4: Recvd 9 +0:0/4: Sent None +1:1/4: Recvd None +1:1/4: Out of loop +0:0/4: Sent None +3:3/4: Recvd None +3:3/4: Out of loop +0:0/4: Sent None +0:0/4: Out of loop +2:2/4: Recvd None +2:2/4: Out of loop +0:Done. 
diff --git a/atm_diag/plot_improve_scatter_pdf.ncl b/atm_diag/plot_improve_scatter_pdf.ncl index 4a90059d..1b570de5 100644 --- a/atm_diag/plot_improve_scatter_pdf.ncl +++ b/atm_diag/plot_improve_scatter_pdf.ncl @@ -112,9 +112,10 @@ begin nca = 2 end if vars =(/"SO2","SO4","BC","OC","NH4NO3"/) + varsn =(/"SO~B~2~N~","SO~B~4~N~","BC","OC","NH~B~4~N~NO~B~3~N~"/) vars_data =(/"so2","so4","ec","oc","nh4no3"/) data_n =(/"SO2_","SO4_","EC_","OC_","NH4NO3_"/) - vars_units = (/"ppv","ug/m3","ug/m3","ug/m3","ug/m3"/) + vars_units = (/"ug/m~S~3~N~","ug/m~S~3~N~","ug/m~S~3~N~","ug/m~S~3~N~","ug/m~S~3~N~"/) ;qmin = (/0.01,0.01,0.005 ;qmax = (/5., 10., nvars = dimsizes(vars) @@ -142,7 +143,8 @@ begin end if ; calculate plotting values -do i = 0, nvars-1 +do i = 0, nvars-2 + print(vars(i)) if (compare .eq. "OBS") then nca = 1 else ; CASE 2 IS MODEL @@ -189,8 +191,18 @@ do i = 0, nvars-1 delete(varsoa) varsoa = (/"CB1","CB2"/) else - delete(varsoa) - varsoa = (/"bc_a1"/) + if (isfilevar(inptr,"bc_a1")) then + delete(varsoa) + varsoa = (/"bc_a1"/) + end if + if (isfilevar(inptr,"bc_a1") .and. isfilevar(inptr,"bc_a4")) then + delete(varsoa) + varsoa = (/"bc_a1","bc_a4"/) + end if + if (isfilevar(inptr,"bc_a1") .and. isfilevar(inptr,"bc_a3")) then + delete(varsoa) + varsoa = (/"bc_a1","bc_a3"/) + end if end if end if varsoa1 = -1 @@ -200,23 +212,42 @@ do i = 0, nvars-1 delete(varsoa1) varsoa1 = (/"OC1","OC2"/) else - delete(varsoa1) - varsoa1 = (/"pom_a1"/) + if (isfilevar(inptr,"pom_a1")) then + delete(varsoa1) + varsoa1 = (/"pom_a1"/) + end if + if (isfilevar(inptr,"pom_a1") .and. isfilevar(inptr,"pom_a4")) then + delete(varsoa1) + varsoa1 = (/"pom_a1","pom_a4"/) + end if + if (isfilevar(inptr,"pom_a1") .and. isfilevar(inptr,"pom_a3")) then + delete(varsoa1) + varsoa1 = (/"pom_a1","pom_a3"/) + end if end if ; function SOA Colette if (isfilevar(inptr,"SOAI") .and. isfilevar(inptr,"SOAT") .and. isfilevar(inptr,"SOAB") .and. isfilevar(inptr,"SOAX") .and. 
isfilevar(inptr,"SOAM")) then delete(varsoa2) varsoa2 = (/"SOAI","SOAT","SOAB","SOAX","SOAM"/) - else - if (isfilevar(inptr,"soa_a1") .and. isfilevar(inptr,"soa_a2")) then - delete(varsoa2) - varsoa2 = (/"soa_a1","soa_a2"/) - else - if (isfilevar(inptr,"SOA")) then + end if + if (isfilevar(inptr,"SOA")) then delete(varsoa2) varsoa2 = (/"SOA"/) - end if + end if + if (isfilevar(inptr,"soa_a1") .and. isfilevar(inptr,"soa_a2")) then + delete(varsoa2) + varsoa2 = (/"soa_a1","soa_a2"/) + else + if (isfilevar(inptr,"soaff1_a1") .and. isfilevar(inptr,"soaff1_a2")) then + delete(varsoa2) + varsoa2 = (/"soaff1_a1","soaff1_a2","soaff2_a1","soaff2_a2","soaff3_a1","soaff3_a2","soaff4_a1","soaff4_a2","soaff5_a1","soaff5_a2",\ + "soabb1_a1","soabb1_a2","soabb2_a1","soabb2_a2","soabb3_a1","soabb3_a2","soabb4_a1","soabb4_a2","soabb5_a1","soabb5_a2",\ + "soabg1_a1","soabg1_a2","soabg2_a1","soabg2_a2","soabg3_a1","soabg3_a2","soabg4_a1","soabg4_a2","soabg5_a1","soabg5_a2" /) end if + if (isfilevar(inptr,"soa1_a1") .and. isfilevar(inptr,"soa1_a2")) then + delete(varsoa2) + varsoa2 = (/"soa_a1","soa_a2","soa2_a1","soa2_a2","soa3_a1","soa3_a2","soa4_a1","soa4_a2","soa5_a1","soa5_a2"/) + end if end if delete(varsoa) varsoa = array_append_record(varsoa1,varsoa2,0) @@ -243,20 +274,22 @@ do i = 0, nvars-1 var11 = var11a(0,nlev-1,:,:) delete(var11a) if vars(i).eq."SO2" then + ; SO2 improve in ug/m3 ; convert mmr to kg/kg - var11 = var11* 64./28.97 + if var11@units.ne."kg/kg" then + var11 = var11* 64./28.97 + end if end if - if vars(i).eq."SO4" .and. (vinta .eq. "so4_a1" .or. vinta .eq. "so4_a2" .or. vinta .eq. "so4_3") then + if vars(i).eq."SO4" .and. (vinta .eq. "so4_a1" .or. vinta .eq. "so4_a2" .or. vinta .eq. "so4_a3") then var11 = var11*96.06/115.11 ; adjust that SO4 is really SO4NO3 end if - if vars(i).eq."pom_a1" then - var11 = var11* 0.714286 +; if vars(i).eq."pom_a1" then +; var11 = var11* 0.714286 +; end if + if vars(i).eq."SOA" .and. (vinta .eq. "SOAI" .or. vinta .eq. "SOAM" .or. 
vinta .eq. "SOAB" .or. vinta .eq. "SOAT" .or. vinta .eq. "SOAX") then + var11 = var11*mwsoa_c(si)/mwsoa(si) end if -; if vars(i).eq."SOA" .and. (vinta .eq. "SOAI" .or. vinta .eq. "SOAM" .or. vinta .eq. "SOAB" .or. vinta .eq. "SOAT" .or. vinta .eq. "SOAX") then -; var11 = var11*mwsoa_c(si)/mwsoa(si) - else - var11 = var11@_FillValue - end if + end if if ca.eq.0 then if si.eq.0 then var1 = var11 @@ -275,7 +308,8 @@ do i = 0, nvars-1 delete(varsoa) delete(var11) end do ; ca - var1 = var1 * rho1 * 1.e9 ; convert from kg/kg to ug/m3 ; ppb for SO2 + + var1 = var1 * rho1 * 1.e9 ; convert from kg/kg to ug/m3 ; var1@_FillValue = -999 var1!0 = "lat" var1!1 = "lon" @@ -295,7 +329,7 @@ do i = 0, nvars-1 ; load data and interpolate model to station location - dir = "/glade/p/acd/tilmes/amwg/amwg_diag_20131004_work/obs_data/" +;dir = "$OBS_DATA/cam-chem/" stn_name = systemfunc("ls $OBS_DATA/cam-chem/"+data_n(i)+"IMPROVE*nc") do nstn=0,dimsizes(stn_name)-1 @@ -432,20 +466,21 @@ res@trXAxisType = "LogAxis" ; loop over variables - do i = 0, nvars -1 + do i = 0, nvars -2 Xdata(0,:) = (/stn_ann_avg_obs(i,:)/) Ydata(0,:) = (/stn_ann_avg_mod1(i,:)/) if (.not.all(ismissing(Xdata(0,:)))) then ccro=esccr(Xdata(0,:),Xdata(0,:),0) - avg_obs=avg(Xdata(0,:)) + avg_obs = dim_median(Xdata(0,:)) else ccro = 0. avg_obs = 0. end if if (.not.all(ismissing(Ydata(0,:)))) then ccr1=esccr(Xdata(0,:),Ydata(0,:),0) - avg_mod1=avg(Ydata(0,:)) + avg_mod1=dim_median(Ydata(0,:)) + ; avg_mod1=avg(Ydata(0,:)) else avg_mod1 = 0. ccr1 = 0. @@ -456,7 +491,14 @@ res@trXAxisType = "LogAxis" Ydata(1,:) = (/stn_ann_avg_mod2(i,:)/) if (.not.all(ismissing(Xdata(1,:)))) then ccr2=esccr(Xdata(1,:),Ydata(1,:),0) - avg_mod2=avg(Ydata(1,:)) + ; ins = ind(Xdata(1,:).gt.0) + avg_mod2=dim_median(Ydata(1,:)) + ; avg_mod2=stat_medrng(Ydata(1,:)) + ; print(Ydata(0,ins)) + ; print(Ydata(1,ins)) + ; print(avg_obs) + ; print(avg_mod1) + ; print(avg_mod2) else ccr2 = 0. avg_mod2 = 0. 
@@ -473,7 +515,7 @@ res@trXAxisType = "LogAxis" res@tiXAxisString = "Observed "+vars(i)+" ("+vars_units(i)+")" res@tiYAxisString = "Simulated "+vars(i)+" ("+vars_units(i)+")" - res@gsnLeftString = vars(i)+" ("+vars_units(i)+")" + res@gsnLeftString = varsn(i)+" ("+vars_units(i)+")" res@pmLegendDisplayMode = "Always" @@ -509,26 +551,27 @@ res@trXAxisType = "LogAxis" txres@txPerimOn = False txres@txJust = "CenterLeft" txres@txBackgroundFillColor = 0 - txres@txFontHeightF = 0.013 + txres@txFontHeightF = 0.015 aaa=log10(qmax) bbb=log10(qmin) aba=(aaa-bbb)/2 - txid2 = gsn_add_text(wks, plot(i), "mean_test="+sprintf("%5.2f",avg_mod1)+" R="+sprintf("%5.2f",ccr1), qmin+qmin*0.2, 10.^(aaa-0.2*aba),txres) - txid3 = gsn_add_text(wks, plot(i), "mean_obs="+sprintf("%5.2f",avg_obs),qmin+qmin*.2, 10.^(aaa-0.1*aba),txres) + txid3 = gsn_add_text(wks, plot(i), "Median Obs="+sprintf("%5.2f",avg_obs),qmin+qmin*.2, 10.^(aaa-0.1*aba),txres) + txres@txFontColor = "blue" + txid2 = gsn_add_text(wks, plot(i), "Median ="+sprintf("%5.2f",avg_mod1)+" R="+sprintf("%5.2f",ccr1), qmin+qmin*0.2, 10.^(aaa-0.2*aba),txres) if (compare .eq. "OBS") then else - txid1 = gsn_add_text(wks, plot(i), "mean_cntl="+sprintf("%5.2f",avg_mod2)+" R="+sprintf("%5.2f",ccr2), qmin+qmin*0.2, 10.^(aaa-0.3*aba), txres) + txres@txFontColor = "red" + txid1 = gsn_add_text(wks, plot(i), "Median ="+sprintf("%5.2f",avg_mod2)+" R="+sprintf("%5.2f",ccr2), qmin+qmin*0.2, 10.^(aaa-0.3*aba), txres) end if - - + txres@txFontColor = "black" end do ;********************************************* txres = True txres@txFontHeightF = 0.017 title = "IMPROVE "+season - gsn_text_ndc(wks,title,.50,.85,txres) + gsn_text_ndc(wks,title,.45,.98,txres) panres = True panres@gsnFrame = False @@ -536,10 +579,10 @@ res@trXAxisType = "LogAxis" panres@gsnPanelTop = 0.96 if (time_stamp .eq. 
"True") then panres@gsnPanelBottom = 0.05 - gsn_panel (wks,plot,(/2,3/),panres) + gsn_panel (wks,plot,(/2,2/),panres) infoTimeStamp(wks,0.011,"DIAG Version: "+version) else - gsn_panel (wks,plot,(/2,3/),panres) + gsn_panel (wks,plot,(/2,2/),panres) end if frame(wks) delete (title) diff --git a/atm_diag/profiles_aircraft_noaa.ncl b/atm_diag/profiles_aircraft_noaa.ncl index 6c197c86..7ba4a7f7 100644 --- a/atm_diag/profiles_aircraft_noaa.ncl +++ b/atm_diag/profiles_aircraft_noaa.ncl @@ -216,7 +216,7 @@ do re = 0, nregions-1 ; open obs data file if (vars(v).ne. "OH" .and. vars(v).ne."SAD_TROP" .and. vars(v).ne."CLOUD") then ;; dir_in = "/glade/p/acd/tilmes/amwg/amwg_diag_20131004_work/obs_data/aircraft/" - dir_in = "$OBS_DATA/cam-chem/aircraft/NOAA/" + dir_in = "$OBS_DATA/cam-chem/" rin1 = addfile(dir_in+variablen(v)+aircraft(re1(re))+".nc","r") air1_med = rin1->prof_med air1_p25 = rin1->prof_p25 diff --git a/diagnostics/diagnostics/atm/model_vs_model.py b/diagnostics/diagnostics/atm/model_vs_model.py index f5dec22a..4ba34667 100644 --- a/diagnostics/diagnostics/atm/model_vs_model.py +++ b/diagnostics/diagnostics/atm/model_vs_model.py @@ -235,6 +235,7 @@ def run_diagnostics(self, env, scomm): glob_set = plot_set.replace('_','') plot_set = 'set5_6' elif 'set_1' == plot_set or 'cset_1' == plot_set: + print('DEBUG model_vs_model: plot_set = %s' % plot_set) glob_set = 'table_' plot_set = plot_set.replace('_','') elif 'sets' == plot_set: diff --git a/diagnostics/diagnostics/atm/model_vs_obs.py b/diagnostics/diagnostics/atm/model_vs_obs.py index 109a21a0..0a7d20f1 100644 --- a/diagnostics/diagnostics/atm/model_vs_obs.py +++ b/diagnostics/diagnostics/atm/model_vs_obs.py @@ -228,6 +228,7 @@ def run_diagnostics(self, env, scomm): glob_set = plot_set.replace('_','') plot_set = 'set5_6' elif 'set_1' == plot_set or 'cset_1' == plot_set: + print('DEBUG model_vs_obs: plot_set = %s' % plot_set) glob_set = 'table_' plot_set = plot_set.replace('_','') elif 'sets' == plot_set: diff 
--git a/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py b/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py index f8b376a6..b9c3bd5a 100644 --- a/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py +++ b/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py @@ -75,6 +75,7 @@ def check_prerequisites(self, env): # call ncks to extract the UAREA variable za_args = ['ncks','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['TOBSFILE']) ] if env['netcdf_format'] in ['netcdfLarge']: + za_args = ['ncks','-6','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['TOBSFILE']) ] try: ## subprocess.check_output( ['ncks','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['TOBSFILE']) ], env=env) diff --git a/ocn_diag/ncl_lib/field_3d_za.ncl b/ocn_diag/ncl_lib/field_3d_za.ncl index 456cb931..c6730ff0 100644 --- a/ocn_diag/ncl_lib/field_3d_za.ncl +++ b/ocn_diag/ncl_lib/field_3d_za.ncl @@ -12,6 +12,14 @@ begin nlev = 21 missing = 1.0e30 +;; NOTE - for pop, the basin regions are always +;; referenced by an index number, 1-14, with the +;; region areas defined in the POP source code +;; input_templates/[grid]_region_ids +;; We are only interested in the regions listed +;; below so want to subset the input fields +;; for just these basin index for these regions + global = 0 atlantic = 6 pacific = 2 @@ -65,7 +73,9 @@ begin do n=0,n_fields-1 fname = ListPop(field_name) - field = f_za->$fname$(0,:,:,:) +;; just need the subset basin index of the second dimension +;; field = f_za->$fname$(0,:,:,:) + field = f_za->$fname$(0,0:6,:,:) field&z_t = z_t field@units = "km" @@ -76,10 +86,11 @@ begin end if field_obs = where(field_obs .lt. -50. .or. field_obs .gt. 
1.e10,field_obs@_FillValue,field_obs) field_diff = field +;; get just the basin index (region) in the first dimension if (isvar("ind_z_obs")) then - field_diff = field - field_obs(:,ind_z_obs,:) + field_diff = field - field_obs(0:6,ind_z_obs,:) else - field_diff = field - field_obs + field_diff = field - field_obs(0:6,:,:) end if units = "~S~o~N~C" dmin = -4.0 @@ -176,6 +187,7 @@ begin opt@do_panel = False end if +;; field now has only 1 dimension plot1 = yz_plot(wks, field(region_index(m),:,:), lat_t, z_t, case_info, \ missing, units, dlev, lev, coltab, opt) diff --git a/ocn_diag/ncl_lib/field_3d_za_diff.ncl b/ocn_diag/ncl_lib/field_3d_za_diff.ncl index 268c126e..3699924e 100644 --- a/ocn_diag/ncl_lib/field_3d_za_diff.ncl +++ b/ocn_diag/ncl_lib/field_3d_za_diff.ncl @@ -11,6 +11,14 @@ begin nlev = 21 missing = 1.0e30 +;; NOTE - for pop, the basin regions are always +;; referenced by an index number, 1-14, with the +;; region areas defined in the POP source code +;; input_templates/[grid]_region_ids +;; We are only interested in the regions listed +;; below so want to subset the input fields +;; for just these basin index for these regions + global = 0 atlantic = 6 pacific = 2 @@ -63,16 +71,19 @@ begin coltab(1:) = ((color2-color1+1)/(nlev-1))*ispan(0,nlev-1,1)+color1 coltab(0) = 0 +;; just need the subset basin index of the second dimension do n=0,n_fields-1 fname = ListPop(field_name) if (dimsizes(getfilevardimsizes(fileid_1,fname)) .eq. 4) then - field_1 = fileid_1->$fname$(0,:,:,:) +;; field_1 = fileid_1->$fname$(0,:,:,:) + field_1 = fileid_1->$fname$(0,0:6,:,:) else field_1 = fileid_1->$fname$(:,:,:) end if if (dimsizes(getfilevardimsizes(fileid_1,fname)) .eq. 
4) then - field_2 = fileid_2->$fname$(0,:,:,:) +;; field_2 = fileid_2->$fname$(0,:,:,:) + field_2 = fileid_2->$fname$(0,0:6,:,:) else field_2 = fileid_2->$fname$(:,:,:) end if diff --git a/timeseries/timeseries/cesm_tseries_generator.py b/timeseries/timeseries/cesm_tseries_generator.py index 03276258..de42c56a 100755 --- a/timeseries/timeseries/cesm_tseries_generator.py +++ b/timeseries/timeseries/cesm_tseries_generator.py @@ -139,8 +139,8 @@ def readArchiveXML(caseroot, input_rootdir, output_rootdir, casename, standalone raise TypeError(err_msg) # load the tseries_time_variant_variables into a list + variable_list = list() if comp_archive_spec.find("tseries_time_variant_variables") is not None: - variable_list = list() for variable in comp_archive_spec.findall("tseries_time_variant_variables/variable"): variable_list.append(variable.text) diff --git a/timeseries/timeseries/chunking.py b/timeseries/timeseries/chunking.py index ca4afb1b..e40119ae 100755 --- a/timeseries/timeseries/chunking.py +++ b/timeseries/timeseries/chunking.py @@ -5,6 +5,26 @@ import cf_units import datetime +def num2date(time_value, unit, calendar): + if ('common_year' in unit): + my_unit = unit.replace('common_year', 'day') + my_time_value = time_value * 365 + else: + my_unit = unit + my_time_value = time_value + return cf_units.num2date(my_time_value, my_unit, calendar) + + +def date2num(date, unit, calendar): + if ('common_year' in unit): + my_unit = unit.replace('common_year', 'day') + my_conversion = 365. + else: + my_unit = unit + my_conversion = 1. 
+ num = cf_units.date2num(date, my_unit, calendar) + return num/my_conversion + def get_input_dates(glob_str): ''' @@ -96,11 +116,16 @@ def get_cesm_date(fn,t=None): l = len(f.variables[att['bounds']]) d = (f.variables[att['bounds']][l-1][1]) else: - d = f.variables['time'][1] - + # problem if time has only one value when units are common_year + try: + d = f.variables['time'][1] + except: + d = f.variables['time'][0] + print 'after d = ',d - d1 = cf_units.num2date(d,att['units'],att['calendar'].lower()) +## d1 = cf_units.num2date(d,att['units'],att['calendar'].lower()) + d1 = num2date(d,att['units'],att['calendar'].lower()) print 'd1.year = ',str(d1.year).zfill(4) print 'd1.month = ',str(d1.month).zfill(2) @@ -128,7 +153,8 @@ def get_chunk_range(tper, size, start, cal, units): ''' # Get the first date - d1 = cf_units.num2date(start, units, cal) +## d1 = cf_units.num2date(start, units, cal) + d1 = num2date(start, units, cal) # Figure out how many days each chunk should be if 'day' in tper: #day @@ -144,11 +170,13 @@ def get_chunk_range(tper, size, start, cal, units): y2 = y2 + 1 m2 = m2 - 12 d2 = datetime.datetime(y2, m2, d1.day, d1.hour, d1.minute) - end = cf_units.date2num(d2, units, cal) +## end = cf_units.date2num(d2, units, cal) + end = date2num(d2, units, cal) elif 'year' in tper: #year d2 = datetime.datetime(int(size)+d1.year, d1.month, d1.day, d1.hour, d1.minute) - end = cf_units.date2num(d2, units, cal) +## end = cf_units.date2num(d2, units, cal) + end = date2num(d2, units, cal) return start, end From 31237c3ae82e3870ba4a757171ccf91d13f34678 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Mon, 13 Mar 2017 21:55:10 -0600 Subject: [PATCH 14/27] fix for atm diags set1 and cset1 table files move to the correct subdirs --- diagnostics/diagnostics/atm/model_vs_model.py | 36 ++++++++++++------- diagnostics/diagnostics/atm/model_vs_obs.py | 32 +++++++++++------ 2 files changed, 46 insertions(+), 22 deletions(-) diff --git 
a/diagnostics/diagnostics/atm/model_vs_model.py b/diagnostics/diagnostics/atm/model_vs_model.py index 4ba34667..ad31fa4e 100644 --- a/diagnostics/diagnostics/atm/model_vs_model.py +++ b/diagnostics/diagnostics/atm/model_vs_model.py @@ -230,31 +230,43 @@ def run_diagnostics(self, env, scomm): # Create set dirs, copy plots to set dir, and create html file for set requested_plot_sets.append('sets') # Add 'sets' to create top level html files + glob_set = list() for plot_set in requested_plot_sets: if 'set_5' == plot_set or 'set_6' == plot_set: - glob_set = plot_set.replace('_','') + glob_set.append(plot_set.replace('_','')) plot_set = 'set5_6' - elif 'set_1' == plot_set or 'cset_1' == plot_set: - print('DEBUG model_vs_model: plot_set = %s' % plot_set) - glob_set = 'table_' - plot_set = plot_set.replace('_','') + elif 'cset_1' == plot_set: + print('DEBUG model_vs_obs: plot_set = %s' % plot_set) + glob_set.append('table_soa') + glob_set.append('table_chem') + plot_set = plot_set.replace('_','') + elif 'set_1' == plot_set: + print('DEBUG model_vs_obs: plot_set = %s' % plot_set) + glob_set.append('table_GLBL') + glob_set.append('table_NEXT') + glob_set.append('table_SEXT') + glob_set.append('table_TROP') + plot_set = plot_set.replace('_','') elif 'sets' == plot_set: set_dir = web_dir + '/' else: + glob_set.append(plot_set) plot_set = plot_set.replace('_','') - glob_set = plot_set + if 'sets' not in plot_set: #'sets' is top level, don't create directory or copy images files set_dir = web_dir + '/' + plot_set # Create the plot set web directory if not os.path.exists(set_dir): os.makedirs(set_dir) # Copy plots into the correct web dir - glob_string = env['test_path_diag']+'/'+glob_set+'*.*' - imgs = glob.glob(glob_string) - if imgs > 0: - for img in imgs: - new_fn = set_dir + '/' + os.path.basename(img) - os.rename(img,new_fn) + for gs in glob_set: + glob_string = env['test_path_diag']+'/'+gs+'*.*' + imgs = glob.glob(glob_string) + if imgs > 0: + for img in imgs: + new_fn = 
set_dir + '/' + os.path.basename(img) + os.rename(img,new_fn) + # Copy/Process html files if 'sets' in plot_set: orig_html = env['HTML_HOME']+'/'+plot_set diff --git a/diagnostics/diagnostics/atm/model_vs_obs.py b/diagnostics/diagnostics/atm/model_vs_obs.py index 0a7d20f1..70e484df 100644 --- a/diagnostics/diagnostics/atm/model_vs_obs.py +++ b/diagnostics/diagnostics/atm/model_vs_obs.py @@ -223,31 +223,43 @@ def run_diagnostics(self, env, scomm): # Create set dirs, copy plots to set dir, and create html file for set requested_plot_sets.append('sets') # Add 'sets' to create top level html files + glob_set = list() for plot_set in requested_plot_sets: if 'set_5' == plot_set or 'set_6' == plot_set: - glob_set = plot_set.replace('_','') + glob_set.append(plot_set.replace('_','')) plot_set = 'set5_6' - elif 'set_1' == plot_set or 'cset_1' == plot_set: + elif 'cset_1' == plot_set: print('DEBUG model_vs_obs: plot_set = %s' % plot_set) - glob_set = 'table_' + glob_set.append('table_soa') + glob_set.append('table_chem') + plot_set = plot_set.replace('_','') + elif 'set_1' == plot_set: + print('DEBUG model_vs_obs: plot_set = %s' % plot_set) + glob_set.append('table_GLBL') + glob_set.append('table_NEXT') + glob_set.append('table_SEXT') + glob_set.append('table_TROP') plot_set = plot_set.replace('_','') elif 'sets' == plot_set: set_dir = web_dir + '/' else: + glob_set.append(plot_set) plot_set = plot_set.replace('_','') - glob_set = plot_set + if 'sets' not in plot_set: #'sets' is top level, don't create directory or copy images files set_dir = web_dir + '/' + plot_set # Create the plot set web directory if not os.path.exists(set_dir): os.makedirs(set_dir) # Copy plots into the correct web dir - glob_string = env['test_path_diag']+'/'+glob_set+'*.*' - imgs = glob.glob(glob_string) - if imgs > 0: - for img in imgs: - new_fn = set_dir + '/' + os.path.basename(img) - os.rename(img,new_fn) + for gs in glob_set: + glob_string = env['test_path_diag']+'/'+gs+'*.*' + imgs = 
glob.glob(glob_string) + if imgs > 0: + for img in imgs: + new_fn = set_dir + '/' + os.path.basename(img) + os.rename(img,new_fn) + # Copy/Process html files if 'sets' in plot_set: orig_html = env['HTML_HOME']+'/'+plot_set From e20a6db9749ce7a075d4f4254463c070201221eb Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Mon, 13 Mar 2017 22:40:11 -0600 Subject: [PATCH 15/27] fix for basin region index to accommodate the Caspian sea removal as an ocean basin --- ocn_diag/ncl_lib/field_3d_za.ncl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ocn_diag/ncl_lib/field_3d_za.ncl b/ocn_diag/ncl_lib/field_3d_za.ncl index c6730ff0..e454e26b 100644 --- a/ocn_diag/ncl_lib/field_3d_za.ncl +++ b/ocn_diag/ncl_lib/field_3d_za.ncl @@ -115,9 +115,10 @@ begin field_diff = field if (dimsizes(z_t) .ne. dimsizes(field_obs&$field_obs!1$)) then ind_z_obs = ind_nearest_coord(z_t*1000.,field_obs&$field_obs!1$,0) - field_diff = field - field_obs(:,ind_z_obs,:) +;; only want the first 6 basin regions from the field_obs + field_diff = field - field_obs(0:6,ind_z_obs,:) else - field_diff = field - field_obs + field_diff = field - field_obs(0:6,:,:) end if units = "psu" From e542bf9b6abd4d7048506e3af8b44f302627e402 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Mon, 20 Mar 2017 09:58:41 -0600 Subject: [PATCH 16/27] update to NCL 6.4.0 on cheyenne --- Machines/machine_postprocess.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 4d32506b..40319e09 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -86,7 +86,7 @@ module load nco/4.6.2 module load netcdf4-python/1.2.7 module load cf_units/1.1.3 - module load ncl/6.3.0 + module load ncl/6.4.0 From 3f53aada95d0d045e1a6f11afa2a5920a7b738e2 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Sun, 26 Mar 2017 22:58:27 -0600 Subject: [PATCH 17/27] bug fix to copy plots to correct web subdirs --- 
diagnostics/diagnostics/atm/atm_avg_generator.py | 12 ------------ diagnostics/diagnostics/atm/atm_diags_generator.py | 14 +------------- diagnostics/diagnostics/atm/create_atm_html.py | 1 - diagnostics/diagnostics/atm/model_vs_model.py | 10 +++++----- diagnostics/diagnostics/atm/model_vs_obs.py | 5 +---- 5 files changed, 7 insertions(+), 35 deletions(-) diff --git a/diagnostics/diagnostics/atm/atm_avg_generator.py b/diagnostics/diagnostics/atm/atm_avg_generator.py index 73b407f0..cc6ae758 100644 --- a/diagnostics/diagnostics/atm/atm_avg_generator.py +++ b/diagnostics/diagnostics/atm/atm_avg_generator.py @@ -214,8 +214,6 @@ def get_variable_list(envDict,in_dir,case_prefix, key_infile, htype, stream): else: # htype == series import glob - -## glob_string = '{0}/{1}.{2}.'.format(in_dir,case_prefix,stream) glob_string = '{0}/{1}.'.format(in_dir,case_prefix) file_list = glob.glob(glob_string+'*') print (glob_string,'File list:',file_list) @@ -457,11 +455,6 @@ def main(options, main_comm, debugMsg): if envDict['test_compute_climo'] == 'True': try: -## if test_time_series == 'True': -## h_path = envDict['test_path_history']+'/atm/proc/tseries/month_1/' -## else: -## h_path = envDict['test_path_history']+'/atm/hist/' - h_path = envDict['test_path_history']+'/atm/'+envDict['test_path_history_subdir'] # generate the climatology files used for all plotting types using the pyAverager @@ -489,11 +482,6 @@ def main(options, main_comm, debugMsg): cntl_end_year,'atm',suffix,filep, envDict['cntl_path_history_subdir']) -## if cntl_time_series == 'True': -## h_path = envDict['cntl_path_history']+'/atm/proc/tseries/month_1/' -## else: -## h_path = envDict['cntl_path_history']+'/atm/hist/' - h_path = envDict['cntl_path_history']+'/atm/'+envDict['cntl_path_history_subdir'] # generate the climatology files used for all plotting types using the pyAverager diff --git a/diagnostics/diagnostics/atm/atm_diags_generator.py b/diagnostics/diagnostics/atm/atm_diags_generator.py index 
58cfee75..63777689 100755 --- a/diagnostics/diagnostics/atm/atm_diags_generator.py +++ b/diagnostics/diagnostics/atm/atm_diags_generator.py @@ -60,7 +60,7 @@ def commandline_options(): help='show exception backtraces as extra debugging ' 'output') - parser.add_argument('--debug', nargs=1, required=False, type=int, default=0, + parser.add_argument('--debug', nargs=1, type=int, default=0, help='debugging verbosity level output: 0 = none, 1 = minimum, 2 = maximum. 0 is default') parser.add_argument('--caseroot', nargs=1, required=True, @@ -154,8 +154,6 @@ def initialize_main(envDict, caseroot, debugMsg, standalone): envDict (dictionary) - environment dictionary """ # setup envDict['id'] = 'value' parsed from the CASEROOT/[env_file_list] files -## env_file_list = ['../env_case.xml', '../env_run.xml', '../env_build.xml', '../env_mach_pes.xml', './env_postprocess.xml', './env_diags_atm.xml'] -## if standalone: env_file_list = ['./env_postprocess.xml', './env_diags_atm.xml'] envDict = cesmEnvLib.readXML(caseroot, env_file_list) @@ -306,18 +304,8 @@ def main(options, main_comm, debugMsg): # check the prerequisites for the diagnostics types debugMsg('Checking prerequisites for {0}'.format(diag.__class__.__name__), header=True) - - #if lmaster: envDict = diag.check_prerequisites(envDict, inter_comm) - inter_comm.sync() - - ## broadcast the envDict - #envDict = inter_comm.partition(data=envDict, func=partition.Duplicate(), involved=True) - - # set the shell env using the values set in the XML and read into the envDict across all tasks - #cesmEnvLib.setXmlEnv(envDict) - debugMsg('inter_comm = {0}'.format(inter_comm)) diag.run_diagnostics(envDict, inter_comm) diff --git a/diagnostics/diagnostics/atm/create_atm_html.py b/diagnostics/diagnostics/atm/create_atm_html.py index 50bd68d6..3e911933 100644 --- a/diagnostics/diagnostics/atm/create_atm_html.py +++ b/diagnostics/diagnostics/atm/create_atm_html.py @@ -19,7 +19,6 @@ def create_plotset_html(html_file_prefix, web_path, 
set_name, env, diag_type): os.remove(new_fn) new_html = open(new_fn,'w') -## if env['MODEL_VS_OBS'] == 'True': if diag_type == 'model_vs_obs': title = env['test_casename'] + '
and
OBS data (info)' else: diff --git a/diagnostics/diagnostics/atm/model_vs_model.py b/diagnostics/diagnostics/atm/model_vs_model.py index ad31fa4e..436b4a86 100644 --- a/diagnostics/diagnostics/atm/model_vs_model.py +++ b/diagnostics/diagnostics/atm/model_vs_model.py @@ -236,12 +236,10 @@ def run_diagnostics(self, env, scomm): glob_set.append(plot_set.replace('_','')) plot_set = 'set5_6' elif 'cset_1' == plot_set: - print('DEBUG model_vs_obs: plot_set = %s' % plot_set) glob_set.append('table_soa') - glob_set.append('table_chem') + glob_set.append('table_chem') plot_set = plot_set.replace('_','') elif 'set_1' == plot_set: - print('DEBUG model_vs_obs: plot_set = %s' % plot_set) glob_set.append('table_GLBL') glob_set.append('table_NEXT') glob_set.append('table_SEXT') @@ -250,7 +248,7 @@ def run_diagnostics(self, env, scomm): elif 'sets' == plot_set: set_dir = web_dir + '/' else: - glob_set.append(plot_set) + glob_set.append(plot_set.replace('_','')) plot_set = plot_set.replace('_','') if 'sets' not in plot_set: #'sets' is top level, don't create directory or copy images files @@ -308,6 +306,9 @@ def run_diagnostics(self, env, scomm): diag_path = web_dir move_files = False + print('DEBUG: model vs. model web_dir = {0}'.format(web_dir)) + print('DEBUG: model vs. 
model diag_path = {0}'.format(diag_path)) + # move the files to the new diag_path if move_files: try: @@ -321,7 +322,6 @@ def run_diagnostics(self, env, scomm): env_file = '{0}/env_diags_atm.xml'.format(env['PP_CASE_PATH']) key = 'ATMDIAG_WEBDIR_{0}'.format(self._name) value = diag_path - ##web_file = '{0}/web_dirs/{1}.{2}-{3}'.format(env['PP_CASE_PATH'], key, scomm.get_size(), scomm.get_rank() ) web_file = '{0}/web_dirs/{1}.{2}'.format(env['PP_CASE_PATH'], key, datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S')) try: diagUtilsLib.write_web_file(web_file, 'atm', key, value) diff --git a/diagnostics/diagnostics/atm/model_vs_obs.py b/diagnostics/diagnostics/atm/model_vs_obs.py index 70e484df..50d0cb0a 100644 --- a/diagnostics/diagnostics/atm/model_vs_obs.py +++ b/diagnostics/diagnostics/atm/model_vs_obs.py @@ -229,12 +229,10 @@ def run_diagnostics(self, env, scomm): glob_set.append(plot_set.replace('_','')) plot_set = 'set5_6' elif 'cset_1' == plot_set: - print('DEBUG model_vs_obs: plot_set = %s' % plot_set) glob_set.append('table_soa') glob_set.append('table_chem') plot_set = plot_set.replace('_','') elif 'set_1' == plot_set: - print('DEBUG model_vs_obs: plot_set = %s' % plot_set) glob_set.append('table_GLBL') glob_set.append('table_NEXT') glob_set.append('table_SEXT') @@ -243,7 +241,7 @@ def run_diagnostics(self, env, scomm): elif 'sets' == plot_set: set_dir = web_dir + '/' else: - glob_set.append(plot_set) + glob_set.append(plot_set.replace('_','')) plot_set = plot_set.replace('_','') if 'sets' not in plot_set: #'sets' is top level, don't create directory or copy images files @@ -314,7 +312,6 @@ def run_diagnostics(self, env, scomm): env_file = '{0}/env_diags_atm.xml'.format(env['PP_CASE_PATH']) key = 'ATMDIAG_WEBDIR_{0}'.format(self._name) value = diag_path - ##web_file = '{0}/web_dirs/{1}.{2}-{3}'.format(env['PP_CASE_PATH'], key, scomm.get_size(), scomm.get_rank() ) web_file = '{0}/web_dirs/{1}.{2}'.format(env['PP_CASE_PATH'], key, 
datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S')) try: diagUtilsLib.write_web_file(web_file, 'atm', key, value) From 3348bb2d66b73c17a45c8467204934d80dab6bb7 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Fri, 7 Apr 2017 19:13:06 -0600 Subject: [PATCH 18/27] update to allow TIMESERIES_GENERATE_ALL option in env_postprocess.xml to override settings in env_timeseries.xml; set MODEL_VS_CONTROL option in ocean diags to FALSE by default; round the CISM time value in the chunking code to avoid boundary problems. --- Config/config_postprocess.xml | 8 +++ Config/config_timeseries.xml | 60 ++++++++++--------- .../ocn/Config/config_diags_ocn.xml | 2 +- .../timeseries/cesm_tseries_generator.py | 16 +++-- timeseries/timeseries/chunking.py | 3 +- 5 files changed, 52 insertions(+), 37 deletions(-) diff --git a/Config/config_postprocess.xml b/Config/config_postprocess.xml index 83b29adf..90a8de16 100644 --- a/Config/config_postprocess.xml +++ b/Config/config_postprocess.xml @@ -136,6 +136,14 @@ desc="If TRUE, create only complete chunks of variable timeseries data files as determined by the env_timeseries.xml tseries_filecat_tper and tseries_filecat_n elements. If FALSE, then incomplete chunks of variable timeseries data will be created and appended to upon subsequent running of the timeseries script. Default is TRUE." 
> + + + + @@ -38,7 +40,7 @@ hist - TRUE + FALSE netcdf4c month_1 years @@ -46,7 +48,7 @@ hist - TRUE + FALSE netcdf4c day_1 years @@ -54,7 +56,7 @@ hist - TRUE + FALSE netcdf4c hour_6 years @@ -62,7 +64,7 @@ hist - TRUE + FALSE netcdf4c hour_3 years @@ -70,7 +72,7 @@ hist - TRUE + FALSE netcdf4c hour_1 years @@ -78,7 +80,7 @@ hist - TRUE + FALSE netcdf4c min_30 years @@ -135,7 +137,7 @@ hist - TRUE + FALSE netcdf4c month_1 years @@ -143,7 +145,7 @@ hist - TRUE + FALSE netcdf4c day_1 years @@ -151,7 +153,7 @@ hist - TRUE + FALSE netcdf4c hour_6 years @@ -159,7 +161,7 @@ hist - TRUE + FALSE netcdf4c hour_3 years @@ -167,7 +169,7 @@ hist - TRUE + FALSE netcdf4c hour_1 years @@ -175,7 +177,7 @@ hist - TRUE + FALSE netcdf4c min_30 years @@ -202,7 +204,7 @@ hist - TRUE + FALSE netcdf4c month_1 years @@ -210,7 +212,7 @@ hist - TRUE + FALSE netcdf4c day_1 years @@ -218,7 +220,7 @@ hist - TRUE + FALSE netcdf4c hour_6 years @@ -226,7 +228,7 @@ hist - TRUE + FALSE netcdf4c hour_3 years @@ -253,7 +255,7 @@ hist - TRUE + FALSE netcdf4c month_1 years @@ -261,7 +263,7 @@ hist - TRUE + FALSE netcdf4c day_1 years @@ -269,7 +271,7 @@ hist - TRUE + FALSE netcdf4c hour_6 years @@ -277,7 +279,7 @@ hist - TRUE + FALSE netcdf4c hour_3 years @@ -304,7 +306,7 @@ hist - TRUE + FALSE netcdf4c month_1 years @@ -324,7 +326,7 @@ hist - TRUE + FALSE netcdf4c month_1 years @@ -332,7 +334,7 @@ hist - TRUE + FALSE netcdf4c day_1 years @@ -340,7 +342,7 @@ hist - TRUE + FALSE netcdf4c year_1 years @@ -356,7 +358,7 @@ hist - TRUE + FALSE netcdf4c year_1 years @@ -364,7 +366,7 @@ hist - TRUE + FALSE netcdf4c day_1 years @@ -384,11 +386,11 @@ hist - TRUE + FALSE netcdf4c year_1 years - 10 + 100 @@ -403,7 +405,7 @@ hist - TRUE + FALSE netcdf4c month_1 years diff --git a/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml b/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml index 25c298c4..a5744782 100644 --- a/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml +++ 
b/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml @@ -131,7 +131,7 @@ Applies to both model and control cases." diff --git a/timeseries/timeseries/cesm_tseries_generator.py b/timeseries/timeseries/cesm_tseries_generator.py index de42c56a..b5c23435 100755 --- a/timeseries/timeseries/cesm_tseries_generator.py +++ b/timeseries/timeseries/cesm_tseries_generator.py @@ -76,7 +76,8 @@ def commandline_options(): #============================================================================================== # readArchiveXML - read the $CASEROOT/env_timeseries.xml file and build the pyReshaper classes #============================================================================================== -def readArchiveXML(caseroot, input_rootdir, output_rootdir, casename, standalone, completechunk, debug, debugMsg): +def readArchiveXML(caseroot, input_rootdir, output_rootdir, casename, standalone, completechunk, + generate_all, debug, debugMsg): """ reads the $CASEROOT/env_timeseries.xml file and builds a fully defined list of reshaper specifications to be passed to the pyReshaper tool. @@ -87,6 +88,7 @@ def readArchiveXML(caseroot, input_rootdir, output_rootdir, casename, standalone casename (string) - casename standalone (boolean) - logical to indicate if postprocessing case is stand-alone or not completechunk (boolean) - end on a ragid boundary if True. Otherwise, do not create incomplete chunks if False + generate_all (boolean) - generate timeseries for all streams if True. Otherwise, use the tseries_create setting. 
""" specifiers = list() xml_tree = ET.ElementTree() @@ -126,7 +128,7 @@ def readArchiveXML(caseroot, input_rootdir, output_rootdir, casename, standalone tseries_create = file_spec.find("tseries_create").text # check if the tseries_create element is set to TRUE - if tseries_create.upper() in ["T","TRUE"]: + if tseries_create.upper() in ["T","TRUE"] or generate_all.upper() in ["T","TRUE"]: # check if tseries_format is an element for this file_spec and if it is valid if file_spec.find("tseries_output_format") is not None: @@ -170,9 +172,6 @@ def readArchiveXML(caseroot, input_rootdir, output_rootdir, casename, standalone tseries_output_dir = '/'.join( [output_rootdir, rootdir, 'proc/tseries', tseries_tper] ) debugMsg("tseries_output_dir = {0}".format(tseries_output_dir), header=True) - if not os.path.exists(tseries_output_dir): - os.makedirs(tseries_output_dir) - if comp+stream not in log.keys(): log[comp+stream] = {'slices':[],'index':0} ts_log_dates = log[comp+stream]['slices'] @@ -183,6 +182,9 @@ def readArchiveXML(caseroot, input_rootdir, output_rootdir, casename, standalone log[comp+stream]['index']=index for cn,cf in files.iteritems(): + if not os.path.exists(tseries_output_dir): + os.makedirs(tseries_output_dir) + history_files = cf['fn'] start_time_parts = cf['start'] last_time_parts = cf['end'] @@ -297,12 +299,14 @@ def main(options, scomm, rank, size, debug, debugMsg): tseries_output_rootdir = cesmEnv['TIMESERIES_OUTPUT_ROOTDIR'] case = cesmEnv['CASE'] completechunk = cesmEnv['TIMESERIES_COMPLETECHUNK'] + generate_all = cesmEnv['TIMESERIES_GENERATE_ALL'] if completechunk.upper() in ['T','TRUE']: completechunk = 1 else: completechunk = 0 specifiers,log = readArchiveXML(caseroot, tseries_input_rootdir, tseries_output_rootdir, - case, options.standalone, completechunk, debug, debugMsg) + case, options.standalone, completechunk, generate_all, + debug, debugMsg) scomm.sync() # specifiers is a list of pyreshaper specification objects ready to pass to the reshaper 
diff --git a/timeseries/timeseries/chunking.py b/timeseries/timeseries/chunking.py index e40119ae..997cef7a 100755 --- a/timeseries/timeseries/chunking.py +++ b/timeseries/timeseries/chunking.py @@ -8,7 +8,8 @@ def num2date(time_value, unit, calendar): if ('common_year' in unit): my_unit = unit.replace('common_year', 'day') - my_time_value = time_value * 365 +## my_time_value = time_value * 365 + my_time_value = int(round(time_value)) * 365 else: my_unit = unit my_time_value = time_value From 13f4deeebb7627b4d137406ae7016204591dfcdc Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Fri, 7 Apr 2017 19:23:56 -0600 Subject: [PATCH 19/27] add tool to count number of timeseries variables in a directory using ls -1 > time_series_vars.txt --- Tools/count_tseries_vars.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100755 Tools/count_tseries_vars.py diff --git a/Tools/count_tseries_vars.py b/Tools/count_tseries_vars.py new file mode 100755 index 00000000..509ec7a6 --- /dev/null +++ b/Tools/count_tseries_vars.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python + +import os, string + +file = open('time_series_vars.txt','r') +buffer = file.read() +tseries = buffer.split('\n') +tseries.pop() +for stream in tseries: + stream.replace(' Time-Series Variables: ','') + stream.replace('[','') + stream.replace(']','') + vars = stream.split(',') + print len(vars) From 523a7241e9d53fa9a2b7d40efccc10840b82717f Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Sun, 23 Apr 2017 18:12:28 -0600 Subject: [PATCH 20/27] Squashed 'mpi_utils/pyTools/' changes from dcacb8b..b9ffae4 b9ffae4 Merge pull request #12 from NCAR/devel 3cb063a Updating README 8d1965c Removing NumPy dependency. Not absolutely needed. 6a7373d Adding NumPy dependence. 
4ce4262 Updating change log 59979fd Fixing tests to work with Python3 (no hanging) b98c611 Appropriate list conversions to work with Python3 17dfc87 Importing print_function from future 47286c4 Correcting test to work (still) with Python2.6 49204eb Updating vprinter to work with Python3 f5a5957 Updating time-keeper tests to Python3 28a1d50 Updating SimpleComm code to work with Python3. Seems to be hanging... 31e1447 Updating partition code to work with Python3 cb384db Suppressing Eclipse warnings c082f73 Adding back import. Was used, just not observed by Eclipse. d5d7a61 Removing unused import a548305 Bump version number 5b3cc56 Updating change log. Reversing order. git-subtree-dir: mpi_utils/pyTools git-subtree-split: b9ffae42dee7d40bd8796eadf6f51c11128330a8 --- CHANGES.rst | 80 +++---- README.rst | 5 +- source/asaptools/__init__.py | 5 +- source/asaptools/partition.py | 4 +- source/asaptools/simplecomm.py | 6 +- source/asaptools/test/partitionArrayTests.py | 39 ++-- source/asaptools/test/partitionTests.py | 29 +-- source/asaptools/test/simpleCommP1STests.py | 61 ++--- .../asaptools/test/simpleCommParDivTests.py | 209 ++++++++++-------- source/asaptools/test/simpleCommParTests.py | 169 +++++++------- source/asaptools/test/timekeeperTests.py | 34 +-- source/asaptools/test/vprinterTests.py | 18 +- source/asaptools/version.py | 2 +- source/asaptools/vprinter.py | 4 +- 14 files changed, 346 insertions(+), 319 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 7333ec64..9eef6f66 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -4,81 +4,61 @@ Change Log Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.txt file for details -Version 0.3 ------------ - -30 March 2015: - -- Repackaging the pyTools repo into a Python package with - installation software and Sphinx-style documentation - -Version 0.4 ------------ - -10 June 2015: - -- Updating install to include LICENSE -- Restructured source directory -- Upload to PyPI - -Version 0.4.1 
+Version 0.6.0 ------------- -15 June 2015: - -- Bugfixes +- Allowing for support of all Python 2.6+ (including Python 3+) -Version 0.4.2 +Version 0.5.4 ------------- -29 June 2015: - -- Update setup script to setuptools (instead of distutils) +- Bugfix: Special catch for dtype='c' (C-type char arrays) in check for + Numpy arrays being bufferable -Version 0.5.0 +Version 0.5.3 ------------- -23 September 2015: +- Updates just for PyPI release -- Now requires Python >=2.7 and <3.0 -- Using more advanced features of Python 2.7 (over 2.6) +Version 0.5.2 +------------- -2 March 2016: +- Improved testing for send/recv data types +- Backwards compatability with mpi4py version 1.3.1 -- Changed Numpy NDArray type-checking to allow for masked arrays, instead of - just NDArrays - Version 0.5.1 ------------- -2 March 2016: - - Checking dtype of Numpy NDArrays before determing if buffered send/recv calls can be used. -Version 0.5.2 +Version 0.5.0 ------------- -2 March 2016: - -- Improved testing for send/recv data types +- Now requires Python >=2.7 and <3.0 +- Using more advanced features of Python 2.7 (over 2.6) +- Changed Numpy NDArray type-checking to allow for masked arrays, instead of + just NDArrays -3 March 2016: +Version 0.4.2 +------------- -- Backwards compatability with mpi4py version 1.3.1 +- Update setup script to setuptools (instead of distutils) -Version 0.5.3 +Version 0.4.1 ------------- -3 March 2016: - -- Updates just for PyPI release +- Bugfixes +Version 0.4 +----------- -Version 0.5.4 -------------- +- Updating install to include LICENSE +- Restructured source directory +- Upload to PyPI -4 March 2016: +Version 0.3 +----------- -- Bugfix: Special catch for dtype='c' (C-type char arrays) in check for - Numpy arrays being bufferable +- Repackaging the pyTools repo into a Python package with + installation software and Sphinx-style documentation diff --git a/README.rst b/README.rst index b7348f14..8bf92278 100644 --- a/README.rst +++ b/README.rst @@ -31,8 
+31,8 @@ Python packages. Dependencies ------------ -All of the ASAP Python Toolbox are written to work with Python 2.7+ (but not -Python 3.0+). The vprinter, timekeeper, and partition modules are pure +All of the ASAP Python Toolbox are written to work with Python 2.6+ (including +Python 3+). The vprinter, timekeeper, and partition modules are pure Python. The simplecomm module depends on mpi4py (>=1.3). This implies the dependency: @@ -75,7 +75,6 @@ change into the top-level source directory, check out the most recent tag, and run the Python distutils setup. On unix, this involves:: $ cd ASAPPyTools - $ git checkout [latest tag] $ python setup.py install [--prefix-/path/to/install/location] The prefix is optional, as the default prefix is typically /usr/local on diff --git a/source/asaptools/__init__.py b/source/asaptools/__init__.py index f035d509..78a7f117 100644 --- a/source/asaptools/__init__.py +++ b/source/asaptools/__init__.py @@ -12,4 +12,7 @@ Send questions and comments to Kevin Paul (kpaul@ucar.edu). 
""" -from version import __version__ +from __future__ import absolute_import + +from .version import __version__ + diff --git a/source/asaptools/partition.py b/source/asaptools/partition.py index 8f708c7e..5ce16ead 100644 --- a/source/asaptools/partition.py +++ b/source/asaptools/partition.py @@ -181,10 +181,10 @@ def __call__(self, data, index=0, size=1): if self._is_indexable(data): (lenpart, remdata) = divmod(len(data), size) psizes = [lenpart] * size - for i in xrange(remdata): + for i in range(remdata): psizes[i] += 1 ibeg = 0 - for i in xrange(index): + for i in range(index): ibeg += psizes[i] iend = ibeg + psizes[index] return data[ibeg:iend] diff --git a/source/asaptools/simplecomm.py b/source/asaptools/simplecomm.py index 663b59c2..8f57a3db 100644 --- a/source/asaptools/simplecomm.py +++ b/source/asaptools/simplecomm.py @@ -118,7 +118,7 @@ See the LICENSE.txt file for details """ -from functools import partial +from functools import partial # @UnusedImport from collections import defaultdict # Define the supported reduction operators @@ -689,7 +689,7 @@ def partition(self, data=None, func=None, involved=False, tag=0): if self.is_manager(): op = func if func else lambda *x: x[0][x[1]::x[2]] j = 1 if not involved else 0 - for i in xrange(1, self.get_size()): + for i in range(1, self.get_size()): # Get the part of the data to send to rank i part = op(data, i - j, self.get_size() - j) @@ -983,7 +983,7 @@ def divide(self, group): RuntimeError: If executed during a serial or 1-rank parallel run """ if self.get_size() > 1: - allgroups = list(set(self._comm.allgather(group))) + allgroups = list(self._comm.allgather(group)) color = allgroups.index(group) monocomm = SimpleCommMPI() monocomm._color = color diff --git a/source/asaptools/test/partitionArrayTests.py b/source/asaptools/test/partitionArrayTests.py index 2c80ec1c..ed57cecf 100644 --- a/source/asaptools/test/partitionArrayTests.py +++ b/source/asaptools/test/partitionArrayTests.py @@ -4,9 +4,12 @@ Copyright 
2016, University Corporation for Atmospheric Research See the LICENSE.txt file for details """ + +from __future__ import print_function + import unittest from asaptools import partition -import numpy +from numpy import arange, array, dstack, testing from os import linesep @@ -28,7 +31,7 @@ class partitionArrayTests(unittest.TestCase): """ def setUp(self): - data = [numpy.arange(3), numpy.arange(5), numpy.arange(7)] + data = [arange(3), arange(5), arange(7)] indices_sizes = [(0, 1), (1, 3), (5, 9)] self.inputs = [] for d in data: @@ -51,21 +54,21 @@ def testDuplicate(self): expected = inp[0] msg = test_info_msg( 'Duplicate', inp[0], inp[1], inp[2], actual, expected) - print msg - numpy.testing.assert_array_equal(actual, expected, msg) + print(msg) + testing.assert_array_equal(actual, expected, msg) def testEquallength(self): - results = [numpy.arange(3), numpy.array([1]), numpy.array([]), - numpy.arange(5), numpy.array([2, 3]), numpy.array([]), - numpy.arange(7), numpy.array([3, 4]), numpy.array([5])] + results = [arange(3), array([1]), array([]), + arange(5), array([2, 3]), array([]), + arange(7), array([3, 4]), array([5])] for (ii, inp) in enumerate(self.inputs): pfunc = partition.EqualLength() actual = pfunc(*inp) expected = results[ii] msg = test_info_msg( 'EqualLength', inp[0], inp[1], inp[2], actual, expected) - print msg - numpy.testing.assert_array_equal(actual, expected, msg) + print(msg) + testing.assert_array_equal(actual, expected, msg) def testEqualStride(self): for inp in self.inputs: @@ -74,35 +77,35 @@ def testEqualStride(self): expected = inp[0][inp[1]::inp[2]] msg = test_info_msg( 'EqualStride', inp[0], inp[1], inp[2], actual, expected) - print msg - numpy.testing.assert_array_equal(actual, expected, msg) + print(msg) + testing.assert_array_equal(actual, expected, msg) def testSortedStride(self): for inp in self.inputs: - weights = numpy.array([(20 - i) for i in inp[0]]) + weights = array([(20 - i) for i in inp[0]]) pfunc = partition.SortedStride() 
- data = numpy.dstack((inp[0], weights))[0] + data = dstack((inp[0], weights))[0] actual = pfunc(data, inp[1], inp[2]) expected = inp[0][::-1] expected = expected[inp[1]::inp[2]] msg = test_info_msg( 'SortedStride', data, inp[1], inp[2], actual, expected) - print msg - numpy.testing.assert_array_equal(actual, expected, msg) + print(msg) + testing.assert_array_equal(actual, expected, msg) def testWeightBalanced(self): results = [set([0, 1, 2]), set([1]), set(), set([3, 2, 4, 1, 0]), set([1]), set(), set([3, 2, 4, 1, 5, 0, 6]), set([3, 6]), set([4])] for (ii, inp) in enumerate(self.inputs): - weights = numpy.array([(3 - i) ** 2 for i in inp[0]]) + weights = array([(3 - i) ** 2 for i in inp[0]]) pfunc = partition.WeightBalanced() - data = numpy.dstack((inp[0], weights))[0] + data = dstack((inp[0], weights))[0] actual = set(pfunc(data, inp[1], inp[2])) expected = results[ii] msg = test_info_msg( 'WeightBalanced', data, inp[1], inp[2], actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) diff --git a/source/asaptools/test/partitionTests.py b/source/asaptools/test/partitionTests.py index e3a90c64..512745c1 100644 --- a/source/asaptools/test/partitionTests.py +++ b/source/asaptools/test/partitionTests.py @@ -4,6 +4,9 @@ Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.txt file for details """ + +from __future__ import print_function + import unittest from asaptools import partition from os import linesep @@ -27,7 +30,7 @@ class partitionTests(unittest.TestCase): """ def setUp(self): - data = [range(3), range(5), range(7)] + data = [list(range(3)), list(range(5)), list(range(7))] indices_sizes = [(0, 1), (1, 3), (5, 9)] self.inputs = [] for d in data: @@ -50,43 +53,43 @@ def testDuplicate(self): expected = inp[0] msg = test_info_msg( 'Duplicate', inp[0], inp[1], inp[2], actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) def testEquallength(self): - results = [range(3), [1], [], - 
range(5), [2, 3], [], - range(7), [3, 4], [5]] + results = [list(range(3)), [1], [], + list(range(5)), [2, 3], [], + list(range(7)), [3, 4], [5]] for (ii, inp) in enumerate(self.inputs): pfunc = partition.EqualLength() actual = pfunc(*inp) expected = results[ii] msg = test_info_msg( 'EqualLength', inp[0], inp[1], inp[2], actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) def testEqualStride(self): for inp in self.inputs: pfunc = partition.EqualStride() actual = pfunc(*inp) - expected = inp[0][inp[1]::inp[2]] + expected = list(inp[0][inp[1]::inp[2]]) msg = test_info_msg( 'EqualStride', inp[0], inp[1], inp[2], actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) def testSortedStride(self): for inp in self.inputs: weights = [(20 - i) for i in inp[0]] pfunc = partition.SortedStride() - actual = pfunc(zip(inp[0], weights), inp[1], inp[2]) - expected = inp[0][:] + actual = pfunc(list(zip(inp[0], weights)), inp[1], inp[2]) + expected = list(inp[0][:]) expected.reverse() expected = expected[inp[1]::inp[2]] msg = test_info_msg( 'SortedStride', zip(inp[0], weights), inp[1], inp[2], actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) def testWeightBalanced(self): @@ -96,11 +99,11 @@ def testWeightBalanced(self): for (ii, inp) in enumerate(self.inputs): weights = [(3 - i) ** 2 for i in inp[0]] pfunc = partition.WeightBalanced() - actual = set(pfunc(zip(inp[0], weights), inp[1], inp[2])) + actual = set(pfunc(list(zip(inp[0], weights)), inp[1], inp[2])) expected = results[ii] msg = test_info_msg( 'WeightBalanced', zip(inp[0], weights), inp[1], inp[2], actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) diff --git a/source/asaptools/test/simpleCommP1STests.py b/source/asaptools/test/simpleCommP1STests.py index 79d78299..8738f952 100644 --- a/source/asaptools/test/simpleCommP1STests.py +++ b/source/asaptools/test/simpleCommP1STests.py @@ -9,6 +9,9 @@ 
Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.txt file for details """ + +from __future__ import print_function + import unittest import numpy as np @@ -16,7 +19,7 @@ from asaptools.partition import EqualStride, Duplicate from os import linesep from mpi4py import MPI -MPI_COMM_WORLD = MPI.COMM_WORLD +MPI_COMM_WORLD = MPI.COMM_WORLD # @UndefinedVariable def test_info_msg(name, data, sresult, presult): @@ -49,14 +52,14 @@ def testGetSize(self): sresult = self.scomm.get_size() presult = self.pcomm.get_size() msg = test_info_msg('get_size()', None, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testIsManager(self): sresult = self.scomm.is_manager() presult = self.pcomm.is_manager() msg = test_info_msg('is_manager()', None, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testSumInt(self): @@ -64,7 +67,7 @@ def testSumInt(self): sresult = self.scomm.allreduce(data, 'sum') presult = self.pcomm.allreduce(data, 'sum') msg = test_info_msg('sum(int)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testSumList(self): @@ -72,7 +75,7 @@ def testSumList(self): sresult = self.scomm.allreduce(data, 'sum') presult = self.pcomm.allreduce(data, 'sum') msg = test_info_msg('sum(list)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testSumDict(self): @@ -80,7 +83,7 @@ def testSumDict(self): sresult = self.scomm.allreduce(data, 'sum') presult = self.pcomm.allreduce(data, 'sum') msg = test_info_msg('sum(list)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testSumArray(self): @@ -88,7 +91,7 @@ def testSumArray(self): sresult = self.scomm.allreduce(data, 'sum') presult = self.pcomm.allreduce(data, 'sum') msg = test_info_msg('sum(array)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def 
testMaxInt(self): @@ -96,7 +99,7 @@ def testMaxInt(self): sresult = self.scomm.allreduce(data, 'max') presult = self.pcomm.allreduce(data, 'max') msg = test_info_msg('max(int)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testMaxList(self): @@ -104,7 +107,7 @@ def testMaxList(self): sresult = self.scomm.allreduce(data, 'max') presult = self.pcomm.allreduce(data, 'max') msg = test_info_msg('max(list)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testMaxDict(self): @@ -112,7 +115,7 @@ def testMaxDict(self): sresult = self.scomm.allreduce(data, 'max') presult = self.pcomm.allreduce(data, 'max') msg = test_info_msg('max(dict)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testMaxArray(self): @@ -120,7 +123,7 @@ def testMaxArray(self): sresult = self.scomm.allreduce(data, 'max') presult = self.pcomm.allreduce(data, 'max') msg = test_info_msg('max(array)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testPartitionInt(self): @@ -128,7 +131,7 @@ def testPartitionInt(self): sresult = self.scomm.partition(data, func=Duplicate()) presult = self.pcomm.partition(data, func=Duplicate()) msg = test_info_msg('partition(int)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testPartitionIntInvolved(self): @@ -136,7 +139,7 @@ def testPartitionIntInvolved(self): sresult = self.scomm.partition(data, func=Duplicate(), involved=True) presult = self.pcomm.partition(data, func=Duplicate(), involved=True) msg = test_info_msg('partition(int, T)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testPartitionList(self): @@ -144,7 +147,7 @@ def testPartitionList(self): sresult = self.scomm.partition(data, func=EqualStride()) presult = self.pcomm.partition(data, func=EqualStride()) msg = 
test_info_msg('partition(list)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testPartitionListInvolved(self): @@ -152,7 +155,7 @@ def testPartitionListInvolved(self): sresult = self.scomm.partition(data, func=EqualStride(), involved=True) presult = self.pcomm.partition(data, func=EqualStride(), involved=True) msg = test_info_msg('partition(list, T)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testPartitionArray(self): @@ -160,7 +163,7 @@ def testPartitionArray(self): sresult = self.scomm.partition(data) presult = self.pcomm.partition(data) msg = test_info_msg('partition(array)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testPartitionArrayInvolved(self): @@ -168,7 +171,7 @@ def testPartitionArrayInvolved(self): sresult = self.scomm.partition(data, involved=True) presult = self.pcomm.partition(data, involved=True) msg = test_info_msg('partition(array, T)', data, sresult, presult) - print msg + print(msg) np.testing.assert_array_equal(sresult, presult, msg) def testPartitionStrArray(self): @@ -176,7 +179,7 @@ def testPartitionStrArray(self): sresult = self.scomm.partition(data) presult = self.pcomm.partition(data) msg = test_info_msg('partition(string-array)', data, sresult, presult) - print msg + print(msg) self.assertEqual(sresult, presult, msg) def testPartitionStrArrayInvolved(self): @@ -184,7 +187,7 @@ def testPartitionStrArrayInvolved(self): sresult = self.scomm.partition(data, involved=True) presult = self.pcomm.partition(data, involved=True) msg = test_info_msg('partition(string-array, T)', data, sresult, presult) - print msg + print(msg) np.testing.assert_array_equal(sresult, presult, msg) def testRationError(self): @@ -201,12 +204,16 @@ def testCollectError(self): if __name__ == "__main__": hline = '=' * 70 if MPI_COMM_WORLD.Get_rank() == 0: - print hline - print 'STANDARD OUTPUT FROM ALL TESTS:' - print hline 
+ print(hline) + print('STANDARD OUTPUT FROM ALL TESTS:') + print(hline) MPI_COMM_WORLD.Barrier() - from cStringIO import StringIO + try: + from cStringIO import StringIO + except ImportError: + from io import StringIO + mystream = StringIO() tests = unittest.TestLoader().loadTestsFromTestCase(SimpleCommP1STests) unittest.TextTestRunner(stream=mystream).run(tests) @@ -215,7 +222,7 @@ def testCollectError(self): results = MPI_COMM_WORLD.gather(mystream.getvalue()) if MPI_COMM_WORLD.Get_rank() == 0: for rank, result in enumerate(results): - print hline - print 'TESTS RESULTS FOR RANK ' + str(rank) + ':' - print hline - print str(result) + print(hline) + print('TESTS RESULTS FOR RANK ' + str(rank) + ':') + print(hline) + print(str(result)) diff --git a/source/asaptools/test/simpleCommParDivTests.py b/source/asaptools/test/simpleCommParDivTests.py index e5cbcff6..518d3ce7 100644 --- a/source/asaptools/test/simpleCommParDivTests.py +++ b/source/asaptools/test/simpleCommParDivTests.py @@ -4,13 +4,16 @@ Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.txt file for details """ + +from __future__ import print_function + import unittest from asaptools import simplecomm from asaptools.partition import EqualStride, Duplicate from os import linesep as eol from mpi4py import MPI -MPI_COMM_WORLD = MPI.COMM_WORLD +MPI_COMM_WORLD = MPI.COMM_WORLD # @UndefinedVariable def test_info_msg(rank, size, name, data, actual, expected): @@ -36,125 +39,133 @@ def setUp(self): self.groups = ['a', 'b', 'c'] # This MPI process's rank, color, and group after division - self.rank = self.grank / len(self.groups) - self.color = self.grank % len(self.groups) + self.rank = int(self.grank // len(self.groups)) + self.color = int(self.grank % len(self.groups)) self.group = self.groups[self.color] # The divided communicators (monocolor and multicolor) self.monocomm, self.multicomm = self.gcomm.divide(self.group) # Every MPI process's color, group, and grank after division - 
self.all_colors = [i % len(self.groups) for i in xrange(self.gsize)] + self.all_colors = [i % len(self.groups) for i in range(self.gsize)] self.all_groups = [self.groups[i] for i in self.all_colors] - self.all_ranks = [i / len(self.groups) for i in xrange(self.gsize)] + self.all_ranks = [int(i // len(self.groups)) for i in range(self.gsize)] def tearDown(self): pass + def testGlobalRanksMatch(self): + actual = self.gcomm.get_rank() + expected = self.grank + msg = test_info_msg(self.grank, self.gsize, 'comm.get_rank() == COMM_WORLD.Get_rank()', + None, actual, expected) + print(msg) + self.assertEqual(actual, expected, msg) + def testMonoGetRank(self): actual = self.monocomm.get_rank() expected = self.rank msg = test_info_msg(self.grank, self.gsize, 'mono.get_rank()', None, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMultiGetRank(self): actual = self.multicomm.get_rank() expected = self.color msg = test_info_msg(self.grank, self.gsize, 'multi.get_rank()', None, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMonoGetSize(self): actual = self.monocomm.get_size() expected = self.all_colors.count(self.color) msg = test_info_msg(self.grank, self.gsize, 'mono.get_size()', None, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMultiGetSize(self): actual = self.multicomm.get_size() expected = self.all_ranks.count(self.rank) msg = test_info_msg(self.grank, self.gsize, 'multi.get_size()', None, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMonoIsManager(self): actual = self.monocomm.is_manager() expected = (self.rank == 0) msg = test_info_msg(self.grank, self.gsize, 'mono.is_manager()', None, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMultiIsManager(self): actual = self.multicomm.is_manager() expected = (self.color == 0) msg = 
test_info_msg(self.grank, self.gsize, 'multi.is_manager()', None, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMonoSumInt(self): data = self.color + 1 actual = self.monocomm.allreduce(data, 'sum') expected = self.monocomm.get_size() * data msg = test_info_msg(self.grank, self.gsize, 'mono.sum(int)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMultiSumInt(self): data = (self.rank + 1) actual = self.multicomm.allreduce(data, 'sum') expected = self.multicomm.get_size() * data msg = test_info_msg(self.grank, self.gsize, 'multi.sum(int)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMonoSumList(self): - data = range(5) + data = list(range(5)) actual = self.monocomm.allreduce(data, 'sum') expected = self.monocomm.get_size() * sum(data) msg = test_info_msg(self.grank, self.gsize, 'mono.sum(list)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMultiSumList(self): - data = range(5) + data = list(range(5)) actual = self.multicomm.allreduce(data, 'sum') expected = self.multicomm.get_size() * sum(data) msg = test_info_msg(self.grank, self.gsize, 'multi.sum(list)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMonoSumDict(self): - data = {'a': range(3), 'b': [5, 7]} + data = {'a': list(range(3)), 'b': [5, 7]} actual = self.monocomm.allreduce(data, 'sum') expected = {'a': self.monocomm.get_size() * sum(range(3)), 'b': self.monocomm.get_size() * sum([5, 7])} msg = test_info_msg(self.grank, self.gsize, 'mono.sum(dict)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMultiSumDict(self): - data = {'a': range(3), 'b': [5, 7]} + data = {'a': list(range(3)), 'b': [5, 7]} actual = self.multicomm.allreduce(data, 'sum') expected = {'a': self.multicomm.get_size() * 
sum(range(3)), 'b': self.multicomm.get_size() * sum([5, 7])} msg = test_info_msg(self.grank, self.gsize, 'multi.sum(dict)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMonoPartitionInt(self): data = self.grank actual = self.monocomm.partition(data, func=Duplicate()) @@ -164,9 +175,9 @@ def testMonoPartitionInt(self): expected = self.color # By chance! msg = test_info_msg(self.grank, self.gsize, 'mono.partition(int)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMultiPartitionInt(self): data = self.grank actual = self.multicomm.partition(data, func=Duplicate()) @@ -176,18 +187,18 @@ def testMultiPartitionInt(self): expected = self.rank * len(self.groups) msg = test_info_msg(self.grank, self.gsize, 'multi.partition(int)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMonoPartitionIntInvolved(self): data = self.grank actual = self.monocomm.partition(data, func=Duplicate(), involved=True) expected = self.color # By chance! 
msg = test_info_msg(self.grank, self.gsize, 'mono.partition(int,T)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMultiPartitionIntInvolved(self): data = self.grank actual = self.multicomm.partition( @@ -195,73 +206,73 @@ def testMultiPartitionIntInvolved(self): expected = self.rank * len(self.groups) msg = test_info_msg(self.grank, self.gsize, 'multi.partition(int,T)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMonoPartitionList(self): if self.monocomm.is_manager(): - data = range(10 + self.grank) + data = list(range(10 + self.grank)) else: data = None actual = self.monocomm.partition(data) if self.monocomm.is_manager(): expected = None else: - expected = range(self.rank - 1, 10 + self.color, - self.monocomm.get_size() - 1) + expected = list(range(self.rank - 1, 10 + self.color, + self.monocomm.get_size() - 1)) msg = test_info_msg(self.grank, self.gsize, 'mono.partition(list)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMultiPartitionList(self): if self.multicomm.is_manager(): - data = range(10 + self.grank) + data = list(range(10 + self.grank)) else: data = None actual = self.multicomm.partition(data) if self.multicomm.is_manager(): expected = None else: - expected = range(self.color - 1, 10 + self.rank * len(self.groups), - self.multicomm.get_size() - 1) + expected = list(range(self.color - 1, 10 + self.rank * len(self.groups), + self.multicomm.get_size() - 1)) msg = test_info_msg(self.grank, self.gsize, 'multi.partition(list)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMonoPartitionListInvolved(self): if self.monocomm.is_manager(): - data = range(10 + self.grank) + data = list(range(10 + self.grank)) else: data = None actual = self.monocomm.partition(data, func=EqualStride(), involved=True) - expected = range(self.rank, 10 + 
self.color, self.monocomm.get_size()) + expected = list(range(self.rank, 10 + self.color, self.monocomm.get_size())) msg = test_info_msg(self.grank, self.gsize, 'mono.partition(list,T)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMultiPartitionListInvolved(self): if self.multicomm.is_manager(): - data = range(10 + self.grank) + data = list(range(10 + self.grank)) else: data = None actual = self.multicomm.partition(data, func=EqualStride(), involved=True) - expected = range(self.color, 10 + self.rank * len(self.groups), - self.multicomm.get_size()) + expected = list(range(self.color, 10 + self.rank * len(self.groups), + self.multicomm.get_size())) msg = test_info_msg(self.grank, self.gsize, 'multi.partition(list,T)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMonoCollectInt(self): if self.monocomm.is_manager(): data = None actual = [self.monocomm.collect() - for _ in xrange(1, self.monocomm.get_size())] + for _ in range(1, self.monocomm.get_size())] expected = [i for i in enumerate(range(len(self.groups) + self.color, self.gsize, @@ -273,18 +284,18 @@ def testMonoCollectInt(self): self.monocomm.sync() msg = test_info_msg(self.grank, self.gsize, 'mono.collect(int)', data, actual, expected) - print msg + print(msg) if self.monocomm.is_manager(): for a in actual: self.assertTrue(a in expected, msg) else: self.assertEqual(actual, expected, msg) - + def testMultiCollectInt(self): if self.multicomm.is_manager(): data = None actual = [self.multicomm.collect() - for _ in xrange(1, self.multicomm.get_size())] + for _ in range(1, self.multicomm.get_size())] expected = [i for i in enumerate([j + self.rank * len(self.groups) for j in range(1, self.multicomm.get_size())], 1)] @@ -295,58 +306,58 @@ def testMultiCollectInt(self): self.multicomm.sync() msg = test_info_msg(self.grank, self.gsize, 'multi.collect(int)', data, actual, expected) - print msg + print(msg) if 
self.multicomm.is_manager(): for a in actual: self.assertTrue(a in expected, msg) else: self.assertEqual(actual, expected, msg) - + def testMonoCollectList(self): if self.monocomm.is_manager(): data = None actual = [self.monocomm.collect() - for _ in xrange(1, self.monocomm.get_size())] - expected = [(i, range(x)) for i, x in + for _ in range(1, self.monocomm.get_size())] + expected = [(i, list(range(x))) for i, x in enumerate(range(len(self.groups) + self.color, self.gsize, len(self.groups)), 1)] else: - data = range(self.grank) + data = list(range(self.grank)) actual = self.monocomm.collect(data) expected = None self.monocomm.sync() msg = test_info_msg(self.grank, self.gsize, 'mono.collect(list)', data, actual, expected) - print msg + print(msg) if self.monocomm.is_manager(): for a in actual: self.assertTrue(a in expected, msg) else: self.assertEqual(actual, expected, msg) - + def testMultiCollectList(self): if self.multicomm.is_manager(): data = None actual = [self.multicomm.collect() - for _ in xrange(1, self.multicomm.get_size())] - expected = [(i, range(x)) for (i, x) in + for _ in range(1, self.multicomm.get_size())] + expected = [(i, list(range(x))) for (i, x) in enumerate([j + self.rank * len(self.groups) for j in range(1, self.multicomm.get_size())], 1)] else: - data = range(self.grank) + data = list(range(self.grank)) actual = self.multicomm.collect(data) expected = None self.multicomm.sync() msg = test_info_msg(self.grank, self.gsize, 'multi.collect(list)', data, actual, expected) - print msg + print(msg) if self.multicomm.is_manager(): for a in actual: self.assertTrue(a in expected, msg) else: self.assertEqual(actual, expected, msg) - + def testMonoRationInt(self): if self.monocomm.is_manager(): data = [100 * self.color + i @@ -361,12 +372,12 @@ def testMonoRationInt(self): self.monocomm.sync() msg = test_info_msg(self.grank, self.gsize, 'mono.ration(int)', data, actual, expected) - print msg + print(msg) if self.monocomm.is_manager(): 
self.assertEqual(actual, expected, msg) else: self.assertTrue(actual in expected, msg) - + def testMultiRationInt(self): if self.multicomm.is_manager(): data = [100 * self.rank + i @@ -381,66 +392,70 @@ def testMultiRationInt(self): self.multicomm.sync() msg = test_info_msg(self.grank, self.gsize, 'multi.ration(int)', data, actual, expected) - print msg + print(msg) if self.multicomm.is_manager(): self.assertEqual(actual, expected, msg) else: self.assertTrue(actual in expected, msg) - + def testTreeScatterInt(self): if self.gcomm.is_manager(): data = 10 else: data = None - + if self.monocomm.is_manager(): mydata = self.multicomm.partition( data, func=Duplicate(), involved=True) else: mydata = None - + actual = self.monocomm.partition( mydata, func=Duplicate(), involved=True) expected = 10 msg = test_info_msg(self.grank, self.gsize, 'TreeScatter(int)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testTreeGatherInt(self): data = self.grank - + if self.monocomm.is_manager(): mydata = [data] for _ in range(1, self.monocomm.get_size()): mydata.append(self.monocomm.collect()[1]) else: mydata = self.monocomm.collect(data) - + if self.gcomm.is_manager(): actual = [mydata] - for _ in xrange(1, self.multicomm.get_size()): + for _ in range(1, self.multicomm.get_size()): actual.append(self.multicomm.collect()[1]) elif self.monocomm.is_manager(): actual = self.multicomm.collect(mydata) else: actual = None - + expected = 10 msg = test_info_msg(self.grank, self.gsize, 'TreeGather(int)', data, actual, expected) - print msg + print(msg) if __name__ == "__main__": hline = '=' * 70 if MPI_COMM_WORLD.Get_rank() == 0: - print hline - print 'STANDARD OUTPUT FROM ALL TESTS:' - print hline + print(hline) + print('STANDARD OUTPUT FROM ALL TESTS:') + print(hline) MPI_COMM_WORLD.Barrier() - from cStringIO import StringIO + try: + from cStringIO import StringIO + except ImportError: + from io import StringIO + mystream = StringIO() tests = 
unittest.TestLoader().loadTestsFromTestCase(SimpleCommParDivTests) unittest.TextTestRunner(stream=mystream).run(tests) @@ -449,7 +464,7 @@ def testTreeGatherInt(self): results = MPI_COMM_WORLD.gather(mystream.getvalue()) if MPI_COMM_WORLD.Get_rank() == 0: for rank, result in enumerate(results): - print hline - print 'TESTS RESULTS FOR RANK ' + str(rank) + ':' - print hline - print str(result) + print(hline) + print('TESTS RESULTS FOR RANK ' + str(rank) + ':') + print(hline) + print(str(result)) diff --git a/source/asaptools/test/simpleCommParTests.py b/source/asaptools/test/simpleCommParTests.py index 7c6f4f1d..44779303 100644 --- a/source/asaptools/test/simpleCommParTests.py +++ b/source/asaptools/test/simpleCommParTests.py @@ -4,6 +4,9 @@ Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.txt file for details """ + +from __future__ import print_function, unicode_literals + import unittest import numpy as np @@ -11,7 +14,7 @@ from asaptools.partition import EqualStride, Duplicate from os import linesep as eol from mpi4py import MPI -MPI_COMM_WORLD = MPI.COMM_WORLD +MPI_COMM_WORLD = MPI.COMM_WORLD # @UndefinedVariable def test_info_msg(rank, size, name, data, actual, expected): @@ -39,44 +42,44 @@ def testGetSize(self): expected = self.size msg = test_info_msg( self.rank, self.size, 'get_size()', None, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testIsManager(self): actual = self.gcomm.is_manager() expected = (self.rank == 0) msg = test_info_msg( self.rank, self.size, 'is_manager()', None, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testSumInt(self): data = 5 actual = self.gcomm.allreduce(data, 'sum') expected = self.size * 5 msg = test_info_msg( self.rank, self.size, 'sum(int)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testSumList(self): data = range(5) actual = self.gcomm.allreduce(data, 
'sum') expected = self.size * sum(data) msg = test_info_msg( self.rank, self.size, 'sum(list)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testSumArray(self): data = np.arange(5) actual = self.gcomm.allreduce(data, 'sum') expected = self.size * sum(data) msg = test_info_msg( self.rank, self.size, 'sum(array)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testSumDict(self): data = {'a': range(3), 'b': [5, 7]} actual = self.gcomm.allreduce(data, 'sum') @@ -84,45 +87,45 @@ def testSumDict(self): 'a': self.size * sum(range(3)), 'b': self.size * sum([5, 7])} msg = test_info_msg( self.rank, self.size, 'sum(dict)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMaxInt(self): data = self.rank actual = self.gcomm.allreduce(data, 'max') expected = self.size - 1 msg = test_info_msg( self.rank, self.size, 'max(int)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMaxList(self): data = range(2 + self.rank) actual = self.gcomm.allreduce(data, 'max') expected = self.size msg = test_info_msg( self.rank, self.size, 'max(list)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMaxArray(self): data = np.arange(2 + self.rank) actual = self.gcomm.allreduce(data, 'max') expected = self.size msg = test_info_msg( self.rank, self.size, 'max(array)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testMaxDict(self): data = {'rank': self.rank, 'range': range(2 + self.rank)} actual = self.gcomm.allreduce(data, 'max') expected = {'rank': self.size - 1, 'range': self.size} msg = test_info_msg( self.rank, self.size, 'max(dict)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testPartitionInt(self): if self.gcomm.is_manager(): data = 10 
@@ -135,9 +138,9 @@ def testPartitionInt(self): expected = 10 msg = test_info_msg( self.rank, self.size, 'partition(int)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testPartitionIntInvolved(self): if self.gcomm.is_manager(): data = 10 @@ -147,9 +150,9 @@ def testPartitionIntInvolved(self): expected = 10 msg = test_info_msg( self.rank, self.size, 'partition(int, T)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testPartitionList(self): if self.gcomm.is_manager(): data = range(10) @@ -162,9 +165,9 @@ def testPartitionList(self): expected = range(self.rank - 1, 10, self.size - 1) msg = test_info_msg( self.rank, self.size, 'partition(list)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testPartitionListInvolved(self): if self.gcomm.is_manager(): data = range(10) @@ -174,9 +177,9 @@ def testPartitionListInvolved(self): expected = range(self.rank, 10, self.size) msg = test_info_msg( self.rank, self.size, 'partition(list, T)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) - + def testPartitionArray(self): if self.gcomm.is_manager(): data = np.arange(10) @@ -189,12 +192,12 @@ def testPartitionArray(self): expected = np.arange(self.rank - 1, 10, self.size - 1) msg = test_info_msg( self.rank, self.size, 'partition(array)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): self.assertEqual(actual, expected, msg) else: np.testing.assert_array_equal(actual, expected, msg) - + def testPartitionStrArray(self): indata = list('abcdefghi') if self.gcomm.is_manager(): @@ -208,12 +211,12 @@ def testPartitionStrArray(self): expected = np.array(indata[self.rank - 1::self.size - 1]) msg = test_info_msg( self.rank, self.size, 'partition(string-array)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): self.assertEqual(actual, expected, 
msg) else: np.testing.assert_array_equal(actual, expected, msg) - + def testPartitionCharArray(self): indata = list('abcdefghi') if self.gcomm.is_manager(): @@ -227,12 +230,12 @@ def testPartitionCharArray(self): expected = np.array(indata[self.rank - 1::self.size - 1], dtype='c') msg = test_info_msg( self.rank, self.size, 'partition(char-array)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): self.assertEqual(actual, expected, msg) else: np.testing.assert_array_equal(actual, expected, msg) - + def testPartitionArrayInvolved(self): if self.gcomm.is_manager(): data = np.arange(10) @@ -242,13 +245,13 @@ def testPartitionArrayInvolved(self): expected = np.arange(self.rank, 10, self.size) msg = test_info_msg( self.rank, self.size, 'partition(array, T)', data, actual, expected) - print msg + print(msg) np.testing.assert_array_equal(actual, expected, msg) - + def testCollectInt(self): if self.gcomm.is_manager(): data = None - actual = [self.gcomm.collect() for _ in xrange(1, self.size)] + actual = [self.gcomm.collect() for _ in range(1, self.size)] expected = [i for i in enumerate(range(1, self.size), 1)] else: data = self.rank @@ -257,18 +260,18 @@ def testCollectInt(self): self.gcomm.sync() msg = test_info_msg( self.rank, self.size, 'collect(int)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): for a in actual: self.assertTrue(a in expected, msg) else: self.assertEqual(actual, expected, msg) - + def testCollectList(self): if self.gcomm.is_manager(): data = None - actual = [self.gcomm.collect() for _ in xrange(1, self.size)] - expected = [(i, range(i)) for i in xrange(1, self.size)] + actual = [self.gcomm.collect() for _ in range(1, self.size)] + expected = [(i, range(i)) for i in range(1, self.size)] else: data = range(self.rank) actual = self.gcomm.collect(data) @@ -276,20 +279,20 @@ def testCollectList(self): self.gcomm.sync() msg = test_info_msg( self.rank, self.size, 'collect(list)', data, actual, 
expected) - print msg + print(msg) if self.gcomm.is_manager(): for a in actual: self.assertTrue(a in expected, msg) else: self.assertEqual(actual, expected, msg) - + def testCollectArray(self): if self.gcomm.is_manager(): data = None actual = [(i, list(x)) for (i, x) in [self.gcomm.collect() - for _ in xrange(1, self.size)]] + for _ in range(1, self.size)]] expected = [(i, list(np.arange(self.size) + i)) - for i in xrange(1, self.size)] + for i in range(1, self.size)] else: data = np.arange(self.size) + self.rank actual = self.gcomm.collect(data) @@ -297,56 +300,56 @@ def testCollectArray(self): self.gcomm.sync() msg = test_info_msg( self.rank, self.size, 'collect(array)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): for a in actual: self.assertTrue(a in expected, msg) else: self.assertEqual(actual, expected, msg) - + def testCollectStrArray(self): if self.gcomm.is_manager(): data = None actual = [(i, list(x)) for (i, x) in [self.gcomm.collect() - for _ in xrange(1, self.size)]] - expected = [(i, map(str, list(np.arange(self.size) + i))) - for i in xrange(1, self.size)] + for _ in range(1, self.size)]] + expected = [(i, list(map(str, list(np.arange(self.size) + i)))) + for i in range(1, self.size)] else: - data = np.array([str(i + self.rank) for i in xrange(self.size)]) + data = np.array([str(i + self.rank) for i in range(self.size)]) actual = self.gcomm.collect(data) expected = None self.gcomm.sync() msg = test_info_msg( self.rank, self.size, 'collect(string-array)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): for a in actual: self.assertTrue(a in expected, msg) else: self.assertEqual(actual, expected, msg) - + def testCollectCharArray(self): if self.gcomm.is_manager(): data = None actual = [(i, list(x)) for (i, x) in [self.gcomm.collect() - for _ in xrange(1, self.size)]] - expected = [(i, map(str, list(np.arange(self.size) + i))) - for i in xrange(1, self.size)] + for _ in range(1, self.size)]] + 
expected = [(i, list(map(lambda c: str(c).encode(), list(np.arange(self.size) + i)))) + for i in range(1, self.size)] else: - data = np.array([str(i + self.rank) for i in xrange(self.size)], + data = np.array([str(i + self.rank) for i in range(self.size)], dtype='c') actual = self.gcomm.collect(data) expected = None self.gcomm.sync() msg = test_info_msg( self.rank, self.size, 'collect(char-array)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): for a in actual: self.assertTrue(a in expected, msg) else: self.assertEqual(actual, expected, msg) - + def testRationInt(self): if self.gcomm.is_manager(): data = range(1, self.size) @@ -359,12 +362,12 @@ def testRationInt(self): self.gcomm.sync() msg = test_info_msg( self.rank, self.size, 'ration(int)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): self.assertEqual(actual, expected, msg) else: self.assertTrue(actual in expected, msg) - + def testRationArray(self): if self.gcomm.is_manager(): data = np.arange(3 * (self.size-1)) @@ -378,39 +381,38 @@ def testRationArray(self): self.gcomm.sync() msg = test_info_msg( self.rank, self.size, 'ration(array)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): self.assertEqual(actual, expected, msg) else: contained = any([np.all(actual == expected[i:i + actual.size]) for i in range(expected.size - actual.size + 1)]) self.assertTrue(contained, msg) - + def testRationStrArray(self): if self.gcomm.is_manager(): - data = np.array(map(str, range(3 * (self.size-1)))) - actual = [ - self.gcomm.ration(data[3 * i:3 * (i + 1)]) - for i in range(0, (self.size-1))] + data = np.array(list(map(str, range(3 * (self.size-1)))), dtype='c') + actual = [self.gcomm.ration(data[3 * i:3 * (i + 1)]) + for i in range(0, (self.size-1))] expected = [None] * (self.size - 1) else: data = None actual = self.gcomm.ration() - expected = np.array(map(str, range(3 * (self.size-1)))) + expected = np.array(list(map(str, range(3 * 
(self.size-1)))), dtype='c') self.gcomm.sync() msg = test_info_msg( self.rank, self.size, 'ration(string-array)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): self.assertEqual(actual, expected, msg) else: contained = any([np.all(actual == expected[i:i + actual.size]) for i in range(expected.size - actual.size + 1)]) self.assertTrue(contained, msg) - + def testRationCharArray(self): if self.gcomm.is_manager(): - data = np.array(map(str, range(3 * (self.size-1))), dtype='c') + data = np.array(list(map(str, range(3 * (self.size-1)))), dtype='c') actual = [ self.gcomm.ration(data[3 * i:3 * (i + 1)]) for i in range(0, (self.size-1))] @@ -418,11 +420,11 @@ def testRationCharArray(self): else: data = None actual = self.gcomm.ration() - expected = np.array(map(str, range(3 * (self.size-1))), dtype='c') + expected = np.array(list(map(str, range(3 * (self.size-1)))), dtype='c') self.gcomm.sync() msg = test_info_msg( self.rank, self.size, 'ration(char-array)', data, actual, expected) - print msg + print(msg) if self.gcomm.is_manager(): self.assertEqual(actual, expected, msg) else: @@ -433,12 +435,16 @@ def testRationCharArray(self): if __name__ == "__main__": hline = '=' * 70 if MPI_COMM_WORLD.Get_rank() == 0: - print hline - print 'STANDARD OUTPUT FROM ALL TESTS:' - print hline + print(hline) + print('STANDARD OUTPUT FROM ALL TESTS:') + print(hline) MPI_COMM_WORLD.Barrier() - from cStringIO import StringIO + try: + from cStringIO import StringIO + except ImportError: + from io import StringIO + mystream = StringIO() tests = unittest.TestLoader().loadTestsFromTestCase(SimpleCommParTests) unittest.TextTestRunner(stream=mystream).run(tests) @@ -447,7 +453,6 @@ def testRationCharArray(self): results = MPI_COMM_WORLD.gather(mystream.getvalue()) if MPI_COMM_WORLD.Get_rank() == 0: for rank, result in enumerate(results): - print hline - print 'TESTS RESULTS FOR RANK ' + str(rank) + ':' - print hline - print str(result) + print(hline) + print('TESTS 
RESULTS FOR RANK ' + str(rank) + ':') + print(str(result)) diff --git a/source/asaptools/test/timekeeperTests.py b/source/asaptools/test/timekeeperTests.py index 89378fac..f16a36d4 100644 --- a/source/asaptools/test/timekeeperTests.py +++ b/source/asaptools/test/timekeeperTests.py @@ -4,6 +4,9 @@ Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.txt file for details """ + +from __future__ import print_function + import unittest from time import sleep @@ -42,8 +45,9 @@ def test_start_stop_values(self): tk.start(name) sleep(wait_time) tk.stop(name) - self.assertAlmostEqual(wait_time, tk.get_time(name), - msg='Accumulated time seems off', places=2) + dt = tk.get_time(name) + dterr = abs(dt / wait_time - 1.0) + self.assertTrue(dterr < 0.15, msg='Accumulated time seems off') def test_start_stop_order_names(self): tk = timekeeper.TimeKeeper() @@ -73,12 +77,12 @@ def test_start_stop_values2(self): tk.stop(name1) sleep(wait_time) tk.stop(name2) - self.assertAlmostEqual(3 * wait_time, tk.get_time(name1), - msg='Accumulated time 1 seems off', - places=2) - self.assertAlmostEqual(2 * wait_time, tk.get_time(name2), - msg='Accumulated time 2 seems off', - places=2) + dt1 = tk.get_time(name1) + dt1err = abs(dt1 / (3 * wait_time) - 1.0) + self.assertTrue(dt1err < 0.15, msg='Accumulated time 1 seems off') + dt2 = tk.get_time(name2) + dt2err = abs(dt2 / (2 * wait_time) - 1.0) + self.assertTrue(dt2err < 0.15, msg='Accumulated time 1 seems off') def test_reset_values(self): tk = timekeeper.TimeKeeper() @@ -98,9 +102,9 @@ def test_get_time(self): tk.start(name) sleep(wait_time) tk.stop(name) - self.assertAlmostEqual(wait_time, tk.get_time(name), - msg='Get time seems off', - places=2) + dt = tk.get_time(name) + dterr = abs(dt / wait_time - 1.0) + self.assertTrue(dterr < 0.15, msg='Get time seems off') def test_get_all_times(self): tk = timekeeper.TimeKeeper() @@ -122,12 +126,12 @@ def test_get_all_times(self): self.assertTrue(all([i1==i2 for i1,i2 in 
zip(expected_all_times.keys(),all_times.keys())]), 'Clock names are not the same') - self.assertAlmostEqual(expected_all_times.values()[0], - all_times.values()[0], + self.assertAlmostEqual(list(expected_all_times.values())[0], + list(all_times.values())[0], msg='Accumulated time 1 seems off', places=1) - self.assertAlmostEqual(expected_all_times.values()[1], - all_times.values()[1], + self.assertAlmostEqual(list(expected_all_times.values())[1], + list(all_times.values())[1], msg='Accumulated time 2 seems off', places=1) diff --git a/source/asaptools/test/vprinterTests.py b/source/asaptools/test/vprinterTests.py index e80ffa57..fcbfded8 100644 --- a/source/asaptools/test/vprinterTests.py +++ b/source/asaptools/test/vprinterTests.py @@ -4,12 +4,18 @@ Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.txt file for details """ + +from __future__ import print_function + import unittest import sys from asaptools import vprinter from os import linesep -from cStringIO import StringIO +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO def test_message(name, data, actual, expected): @@ -30,7 +36,7 @@ def testToStr(self): actual = self.vprint.to_str(*data) expected = ''.join([str(d) for d in data]) msg = test_message('to_str(*data)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) def testToStrHeader(self): @@ -38,7 +44,7 @@ def testToStrHeader(self): actual = self.vprint.to_str(*data, header=True) expected = self.header + ''.join([str(d) for d in data]) msg = test_message('to_str(*data)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) def testVPrint(self): @@ -51,7 +57,7 @@ def testVPrint(self): sys.stdout = backup expected = self.vprint.to_str(*data) + linesep msg = test_message('vprint(*data)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) def testVPrintHeader(self): @@ -64,7 +70,7 @@ 
def testVPrintHeader(self): sys.stdout = backup expected = self.vprint.to_str(*data, header=True) + linesep msg = test_message('vprint(*data)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) def testVPrintVerbosityCut(self): @@ -77,7 +83,7 @@ def testVPrintVerbosityCut(self): sys.stdout = backup expected = '' msg = test_message('vprint(*data)', data, actual, expected) - print msg + print(msg) self.assertEqual(actual, expected, msg) diff --git a/source/asaptools/version.py b/source/asaptools/version.py index fa26b770..2b8ba9b7 100644 --- a/source/asaptools/version.py +++ b/source/asaptools/version.py @@ -1,2 +1,2 @@ # Current package version -__version__ = '0.5.5' +__version__ = '0.6.0' diff --git a/source/asaptools/vprinter.py b/source/asaptools/vprinter.py index 14d07d21..d644d6b4 100644 --- a/source/asaptools/vprinter.py +++ b/source/asaptools/vprinter.py @@ -8,6 +8,8 @@ See the LICENSE.txt file for details """ +from __future__ import print_function + class VPrinter(object): @@ -105,4 +107,4 @@ def __call__(self, *args, **kwargs): verbosity = kwargs['verbosity'] if verbosity < self.verbosity: - print self.to_str(*args, **kwargs) + print(self.to_str(*args, **kwargs)) From b3d29ca3413277cfe4dd3a88c2507000a325a21b Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Sun, 23 Apr 2017 18:14:32 -0600 Subject: [PATCH 21/27] Squashed 'reshaper/pyReshaper/' changes from 944ab10..769e587 769e587 Merge pull request #29 from NCAR/devel 95fcbe6 Merge pull request #28 from NCAR/chunking 7f1ebbb Adding ability to produce speedup plots 1a81993 Add ability to plot log plots and subselected jobs by data 4d65341 Adding functionality to plot log bar plots 17337ab Merge branch 'chunking' of https://github.com/NCAR/PyReshaper into chunking f5716ae Formatting only. db18732 New YS 8x16 (128 proc) timings. 8a27c17 New timings for 4x4 YS runs. f0ceb7e BUGFIX: Wrong checks when generating plots. Also resizing. 
7a69d4e New Yellowstone timing numbers. d4cd015 Merge branch 'chunking' of https://github.com/NCAR/PyReshaper into chunking 5f366c1 Changing pop-0.1deg chunking to fit in yellowstone memory. 7f33829 Adding to change log. 883eeab Adding chunking documentation to manual 738d310 Bumping up pop-0.1deg chunk size 1d4598a Copying version 0.X timings into new database for comparisons 1d612ba Renaming new timings database to default name b960ddc Renaming old timings file to match old version c691b15 New timings database for new PyReshaper version 27b420c Changing to indicate read chunk sizes only (write are same) b8e49e2 Attempt to adjust chunk sizes. 81b0223 Merge branch 'chunking' of https://github.com/NCAR/PyReshaper into chunking 66ca398 Set camfv-1deg chunk sizes to none (read/write whole variables) e635b00 Merge branch 'chunking' of https://github.com/NCAR/PyReshaper into chunking 57b2fe6 Moving sync location in convert 82395f5 BUGFIX: Wrong capitalization when attempting to call getdb() 5627dca Adding a message about the chunk sizes used. 47109f1 Fixing typo in warning message 4eed760 Adding chunking parameters to testinfo database. b84c7ad Merge pull request #27 from NCAR/devel fb809a4 BUGFIX: Datatype must be converted to a numpy dtype c3c204c Revert "Working on updating to Python3" 0de3b37 Revert "Updating IOBackend tests to work with Python 2.6+" 3a3f1d8 Revert "Updating to work with Python2.6+" 2f0fc2c Revert "Updating to work with Python2.6+" 66374d9 Updating to work with Python2.6+ a8f0fc2 Updating to work with Python2.6+ 232406a Updating IOBackend tests to work with Python 2.6+ 549ddf9 Working on updating to Python3 3350d71 Simplifying chunking argument parsing f703b59 Suppressing PyDev error 82cbcc0 Cleanup and passing in chunks to Reshaper convert method 48b4740 Enabling chunking via command-line script 17e98ad Adding ability to create new timings file at runtime. 
3556e9b Adding ability to read/write separate timings files 62d0173 Removing NetCDF compare tool. Try to use cprnc. d413515 Adding pretty-print to output 8b3c256 Allowing backwards-compatibility with older netCDF4 b34445f Removing conflict with parser's help option 93ab872 One more modification 59155ad Making the script executable from the command line ae172ca Adding a NetCDF compare tool 533756b Merge branch 'devel' of https://github.com/NCAR/PyReshaper into devel e5ed2ac Adding module load to init and ignoring results directory. 68a3e3c Bugfix. Need to set default chunking after unlimited dim is found. d1c4edd Some cleanup and restructuring. 4343273 First pass at actual chunking code 5a07d3a Not printing 'usage'. Printing 'use'. a66c288 Merge pull request #25 from NCAR/memory_diag af921ec Adding maximum memory usage diagnostic output 04ff328 Merge pull request #24 from NCAR/extract_only df9e99c Updated changelog file. No more dates. Reverse order. a2c3e47 Updating the manual to describe the new extract-only features f31a7a0 Adding command-line option to extract only some time-series variables c3d06e8 Extract only some variables seems to work from Python side. 79af819 Checking extract_only tests. Appears to work, but needs more testing. 
58eeb21 Updating specification tests to include time-series name list 13578d2 Updated GitHub references 2567496 First pass as modifying reshaper to use explicit time-series name list 031cfda Modifying specification to take time-series name list 7f7fa13 Merge pull request #23 from NCAR/default_1D_metadata b4ec088 Updating manual for new feature c87bafc Finished adding option to assert all 1d time-var variables are metadata 568d534 Cleanup: Better allowances for netCDF4-python v1.1.1 95aaf34 attempting to add netCDF4 v1.1.1 support 38ab628 Updating bakeoff data location on GLADE 9f55a5e Need to repr the wmode argument when writing script (must be str) f78f002 Bugfix 670edfa Backporting checkresults to Python 2.6 cc0a772 Bugfix: more specfile extension changing cbed5cd Bugfix: specfile has extension s2s 38fef51 bugfix f1de114 Bugfix and changing usage statements cf29b54 More formatting of descriptions 601081d Updating descriptions of test utilities dbff464 Backporting YS tests to Python version 2.6 fed167e Merge pull request #22 from NCAR/iobackend 7aa5477 Updating README to be consistent with manual. 12aa9e3 Updating CHANGELOG 44823f9 Updating user documentation 0a8d18b Updating comments only. f5e7ad7 Modifying CHANGES log f3b6041 Formatting only. a38aa2a Updating changelog. 6eab81b Cleaning up iobackend branch, allowing I/O backend to be set by default f762a67 Moved I/O backend info output to initialization 69548a0 More output about the NetCDF output settings. 
7b0f71b Allowing the use of Python 2.6 ordereddict module, if available 6cd1d9f Changing the default backend to PyNIO (tests suggest it's faster) 74e497c Removing unnecessary import 915bf1c Updating tests with new names 469a0e0 Implementing use of new get_value/assign_value functions b557895 Adding get_value and assign_value methods for general get/set e26ffb1 Attempting to convert typecode 'S' to 'c' when using PyNIO e8bd27a Casting datatype to string in Nio create_variable d82aacc Adding user-level output on which I/O backend is being used. 9cdad55 Adding option for I/O backend to s2smake (stored in specfile) 562f6c4 Updating to new iobackend changes 6df401a Removing debug output. Changing 'typecode' str to 'datatype' Numpy.dtype 19ced93 More debugging info. 0634559 Adding debug output. ee97a88 Switching PyReshaper objects over to new iobackend dae810d Adding specifier checks on backend 49c081f Adding shape and size methods to NCVariable 3ed54aa Adding append tests. All tests pass. 8b72ef5 Added write tests. All are passing. 7c56210 Cleaned up API. Switched to netCDF4 way of dealing with attributes. 0c3cfd8 Now adding iobackend and tests. 43cc6d8 Updated copyright info. Added iobackend module and started tests. 
2257386 Removing MultiSpecReshaper reference from factory function 18b5359 Removing MultiSpecReshaper 139f708 Starting work on new memory-management system/partitioner git-subtree-dir: reshaper/pyReshaper git-subtree-split: 769e587dabe15c0ecaf4d0c2903152a12ae640ce --- .pydevproject | 2 +- CHANGES.rst | 153 +- README.rst | 36 +- docs/source/iobackend.rst | 8 + docs/source/manual.rst | 609 +- docs/source/pyreshaper.rst | 1 + scripts/s2smake | 23 +- scripts/s2srun | 14 +- setup.py | 15 +- source/pyreshaper/iobackend.py | 359 + source/pyreshaper/reshaper.py | 613 +- source/pyreshaper/specification.py | 64 +- source/pyreshaper/test/iobackendTests.py | 1120 ++++ source/pyreshaper/test/mkTestData.py | 5 +- .../{s2sReshaperTests.py => reshaperTests.py} | 114 +- source/pyreshaper/test/s2smakeTests.py | 45 +- source/pyreshaper/test/s2srunTests.py | 95 +- source/pyreshaper/test/specificationTests.py | 97 +- source/pyreshaper/version.py | 2 +- tests/yellowstone/.gitignore | 2 + tests/yellowstone/checkresults.py | 78 +- tests/yellowstone/init.sh | 3 + tests/yellowstone/mkplots.py | 112 +- tests/yellowstone/mkstats.py | 67 +- tests/yellowstone/mktimings.py | 30 +- tests/yellowstone/runtests.py | 276 +- tests/yellowstone/testinfo.json | 111 +- tests/yellowstone/timings-v0.json | 5464 ++++++++++++++++ tests/yellowstone/timings.json | 5751 ++--------------- tests/yellowstone/utilities/plottools.py | 61 +- tests/yellowstone/utilities/testtools.py | 19 +- 31 files changed, 8999 insertions(+), 6350 deletions(-) create mode 100644 docs/source/iobackend.rst create mode 100644 source/pyreshaper/iobackend.py create mode 100644 source/pyreshaper/test/iobackendTests.py rename source/pyreshaper/test/{s2sReshaperTests.py => reshaperTests.py} (74%) create mode 100644 tests/yellowstone/.gitignore create mode 100644 tests/yellowstone/timings-v0.json diff --git a/.pydevproject b/.pydevproject index 8e4b1e0d..359f70bd 100644 --- a/.pydevproject +++ b/.pydevproject @@ -1,7 +1,7 @@ python 2.6 
-Homebrew Python 2.6 +Homebrew Python 2.7 /${PROJECT_DIR_NAME}/source /${PROJECT_DIR_NAME}/tests/yellowstone/utilities diff --git a/CHANGES.rst b/CHANGES.rst index dce7708b..8a35cb45 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,126 +1,113 @@ PyReshaper Change Log ===================== -Copyright 2015, University Corporation for Atmospheric Research +Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.rst file for details -VERSION 0.9.0 + +VERSION 1.0.0 ------------- -9 Jun 2014: - - Initial release. (Previously in prototype repo.) + - New I/O backend manager. Can work with netCDF4 or PyNIO. + - Removing hierarchy to Reshaper classes, as there is only 1 (similarly + removing hierarchy of matching Specifier classes) + - No longer requires PyNIO to install...but you need netCDF4 or PyNIO to + run the reshaper! + - Adding ability to treat all 1D time-dependent variables as metadata + with only one command-line option + - Adding ability to extract out only some time-series files, instead of + requiring all time-series files be extracted. This option should be + used sparingly and with caution! + - Adding the ability to "chunk" over user-specified dimensions when + reading and writing. This finally allows for some control over memory + use! 
-1 Jul 2014: - - Improvements to the Yellowstone testing scripts -2 Jul 2014: - - Added new UCAR license agreement +VERSION 0.9.10 +-------------- + + - Python 2.6 back-porting + + +VERSION 0.9.6 +------------- + + - Split 'slice2series' script into 2 separate scripts: 's2smake' to generate + specifier object files (specfiles), and 's2srun' to run a reshaper job + with a given specifier object file + - Now uses 'write mode' to determing if overwriting output files or skipping + existing output files + - Added capability to append to existing output files + + +VERSION 0.9.5 +------------- + + - Fix bug in the 'slice2series' script + - Adds a write to file option for the Specifiers + - Modifying output message verbosity settings + + +VERSION 0.9.4 +------------- + + - Newly automated versioning information + - Bugfixes, performance improvements + - New yellowstone testing code + - Now measures read/write times separately in diagnostic data + - Performance improvement (no explicit loop over time-steps in a time-slice + file needed) + - Enabling user-defined compression level for NetCDF4 output + + +VERSION 0.9.3 +------------- + + - Bugfix: Now installs properly from PyPI + + +VERSION 0.9.2 +------------- + + - Tagging new branch for version 0.9.2 release. + - Restructured source tree + - Installation of LICENSE file with package + - Updates for upload to PyPI VERSION 0.9.1 ------------- -4 Aug 2014: - Added many new helper tools in the Yellowstone testing directory. - -18 Aug 2014: - Perform a sort of the time-series variables by size before partitioning them across processors. Since the partition strides through the list of variables names, by sorting the variables by size first, we do a reasonable job of selecting as many small variables as large on every processor - -28 Aug 2014: - A few minor typo bugfixes. - -1 Sep 2014: - Added the ability to output all metadata to a "once" file. 
This metadata includes ALL variables that are NOT the time-series variables being written to file. Hence, this can produce time-series files that are not entirely self-describing. - -3 Sep 2014: - Fixed a bug in which a job hangs when using more ranks than variables - -10 Sep 2014: - Switched to using the identified 'unlimited' dimension name instead of explicitly using the string 'time'. - -11 Sep 2014: - Added the ability to process time-slice files with multiple time steps in each slice file. - Added new plotting utility and module. Some changes to the getsizes utility in the Yellowstone testing area. Some more tweaks here, too. - -4 March 2015: - Updated the PyReshaper to use the newly created PyTools/trunk. (Will modify the externals to use a tag once the new PyTools has been tested and verified). - Renamed any ATM/LND 0.1 degree tests in the tests/yellowstone directory to 0.25 degree, which they actually are. Similarly, any 0.25 degree OCN/ICE tests were named 0.1 degree. - -16 March 2015: - Updated the Specifier and Reshaper Doxygen comments to use Doxypypy formatting. - -VERSION 0.9.2 -------------- - -26 March 2015: - - Tagging new branch for version 0.9.2 release. 
- -10 June 2015: - - Restructured source tree - - Installation of LICENSE file with package - - Updates for upload to PyPI - -VERSION 0.9.3 -------------- - -10 June 2015: - - Bugfix: Now installs properly from PyPI - -VERSION 0.9.4 -------------- - -29 June 2015: - - Newly automated versioning information - -1 October 2015: - - Bugfixes, performance improvements - - New yellowstone testing code -2 October 2015: - - Now measures read/write times separately in diagnostic data - - Performance improvement (no explicit loop over time-steps in a time-slice - file needed) - - Enabling user-defined compression level for NetCDF4 output - -VERSION 0.9.5 -------------- -6 October 2015: - - Fix bug in the 'slice2series' script - - Adds a write to file option for the Specifiers - - Modifying output message verbosity settings - -VERSION 0.9.6 +VERSION 0.9.0 ------------- -7 October 2015: - - Split 'slice2series' script into 2 separate scripts: 's2smake' to generate - specifier object files (specfiles), and 's2srun' to run a reshaper job - with a given specifier object file - - Now uses 'write mode' to determing if overwriting output files or skipping - existing output files - -12 October 2015: - - Added capability to append to existing output files - -VERSION 0.9.10 --------------- - -7 March 2016: - - Python 2.6 back-porting - - \ No newline at end of file + - Initial release. (Previously in prototype repo.) + - Improvements to the Yellowstone testing scripts + - Added new UCAR license agreement diff --git a/README.rst b/README.rst index 1f389b08..ea3376a5 100644 --- a/README.rst +++ b/README.rst @@ -14,9 +14,9 @@ Send questions and comments to Kevin Paul (kpaul@ucar.edu). Overview -------- -The PyReshaper package is a Python-based package for performing time-slice -to time-series convertion of NetCDF files, compliant with the CF 1.6 -Conventions. 
The PyReshaper package is designed to run in parallel to +The PyReshaper is a tool for converting time-slice (or history-file +or synoptically) formatted NetCDF files into time-series (or single-field) +format. The PyReshaper package is designed to run in parallel (MPI) to maximize performance, with the parallelism implemented over variables (i.e., task parallelism). This means that the maximum parallelism achieveable for a given operation is one core/processor per variables in @@ -26,25 +26,23 @@ the time-slice NetCDF files. Dependencies ------------ -The PyReshaper directly depends upon the PyNIO and mpi4py packages. Access -and manipulation of the NetCDF files is done through PyNIO, and the parallelism -is implimented directly with mpi4py. Implicit dependencies exists, as PyNIO -has its own dependencies (netCDF, NCL, and numpy) as does mpi4py (numpy and -MPI). +The PyReshaper directly depends upon the ASAP Python Toolbox (ASAPTools) +and either PyNIO or netcdf4-python. Access and manipulation of the NetCDF +files is done through PyNIO or netcdf4-python, and the parallelism is +implimented using the ASAPTools SimpleComm, which uses mpi4py. Implicit +dependencies exist as a result of these direct dependencies. -Currently the explicit dependencies are known to be: +The PyReshaper explicitly depends upon the following Python packages: -* ASAPTools (>=0.4) -* PyNIO (>=1.4.1) -* mpi4py (>=1.3) +- PyNIO (v1.4.1+) or netCDF4-python (v1.2+) +- ASAPPyTools (v0.4+) -This implies the dependencies: - -* PyNIO depends upon numpy (>=1.4), NCL, and netCDF -* mpi4py depends on numpy (>=1.4) and MPI +These packages imply a dependency on the NumPy (v1.4+) and mpi4py (v1.3+) +packages, and the libraries NetCDF and MPI/MPI-2. Additionally, the entire package is designed to work with Python v2.6 and up -to (but not including) Python v3.0. +to (but not including) Python v3.0. If using Python version 2.6, you will +need to install the ``ordereddict`` package, too. 
The version requirements have not been rigidly tested, so earlier versions may actually work. No version requirement is made during installation, though, @@ -77,7 +75,7 @@ via git from the site:: https://github.com/NCAR/PyReshaper -Check out the most recent stable tag. The source is available in +You may then check out the most recent stable tag. The source is available in read-only mode to everyone. Developers are welcome to update the source and submit Pull Requests via GitHub. @@ -109,7 +107,7 @@ to install. To do this, type (on unix machines):: $ python setup.py install --user This can be handy since the site-packages directory will be common for all -user installs, and therefore only needs to be added to the PYTHONPATH once. +user installs, and therefore only needs to be added to the ``PYTHONPATH`` once. To install the documentation, you must have Sphinx installed on your system. Sphinx can be easily installed with pip, via:: diff --git a/docs/source/iobackend.rst b/docs/source/iobackend.rst new file mode 100644 index 00000000..64cf99a0 --- /dev/null +++ b/docs/source/iobackend.rst @@ -0,0 +1,8 @@ + +pyreshaper.iobackend module +------------------------------- + +.. automodule:: pyreshaper.iobackend + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/manual.rst b/docs/source/manual.rst index 28c9fe20..8964771b 100644 --- a/docs/source/manual.rst +++ b/docs/source/manual.rst @@ -5,103 +5,60 @@ The PyReshaper User's Manual What is it? =========== -The PyReshaper is a tool for converting NetCDF time-slice formatted -files into time-series format. It is written in Python as an -easy-to-install package consisting of 4 Python modules. +The PyReshaper is a tool for converting time-slice (or history-file +or synoptically) formatted NetCDF files into time-series (or single-field) +format. 
Requirements ------------ -The PyReshaper is built upon 2 third-party Python packages, which -separately depend upon other packages, as indicated below. +The PyReshaper explicitly depends upon the following Python packages: + +- PyNIO (v1.4.1+) or netCDF4-python (v1.2+) +- ASAPPyTools (v0.4+) -- PyNIO (v1.4.1) -- numpy (v1.4) -- NetCDF -- mpi4py (v1.3) -- A dynamic/shared library installation of MPI or MPI-2 +These packages imply a dependency on the NumPy (v1.4+) and mpi4py (v1.3+) +packages, and the libraries NetCDF and MPI/MPI-2. + +If using Python version 2.6, you will need to install the ``ordereddict`` +package, too. No thorough testing has been done to show whether earlier versions of these dependencies will work with the PyReshaper. The versions listed have been shown to work, and it is assumed that later versions will continue to work. -How can I get it? -================= - -The best way to obtain the PyReshaper code is to check it out from the -GitHub site, as shown below. - -:: - - $ git clone https://github.com/NCAR-CISL-ASAP/PyReshaper - $ cd PyReshaper - -This will download the most recent stable version of the source code. If -the most recent version of the non-stable source is desired, you may switch -to the development branch. - -:: - - $ git checkout devel - - -How do I set it up? -=================== - -Easy Installation ------------------ +How can I install it? +===================== -The easiest way to install the PyReshaper is from the Python Package Index, -PyPI. To do this, use the ``pip`` tool like follows. +The easiest way of obtaining the PyReshaper is from the Python Package +Index (PyPI), using ``pip``: :: $ pip install [--user] PyReshaper - -If you do not have the required dependencies installed, then ``pip`` will -install them for you at this time. The ``--user`` option will be necessary -if you do not have system install privileges on the machine you are using. 
- -Installation from Source ------------------------- - -In this section, we describe how to install the PyReshaper package on a -unix-like system. The procedure is similar for a Mac, but we have not -tested the package on Windows. - -As described in the previous section, first check out the source code -from the subversion repository. On unix-like systems, the command is -shown below. - -:: - - $ git clone https://github.com/NCAR-CISL-ASAP/PyReshaper -Enter into the newly created directory. +Alternatively, you can download the source from GitHub and install with +``setuptools``: :: + $ git clone https://github.com/NCAR/PyReshaper $ cd PyReshaper + $ python setup.py install [--user] -The contents of the repository will look like the following. +This will download and install the most recent stable version of the source +code. If the most recent version of the non-stable source is desired, you +may switch to the development branch before installing. :: - $ ls - CHANGES.rst README.rst docs/ setup.py tests/ - LICENSE.rst bin/ setup.cfg source/ - -To install the package, type the following command from this directory. - -:: - - $ python setup.py install [--user] - -If you are a system administrator, you can leave off the ``--user`` -option, and the package will be installed in ``/usr/local``, by default. -Alternatively, you may specify your own installation root directory with -the ``--prefix`` option. + $ git checkout devel + +When installing, the ``--user`` option to either ``pip`` or ``setup.py`` +will install the PyReshaper in the user's private workspace, as defined +by the system on which the user is installing. This is useful if you don't +have permissions to install system-wide software. Generating the API Documentation -------------------------------- @@ -175,11 +132,16 @@ Before we describe the various ways you can use the PyReshaper, we must describe more about what, precisely, the PyReshaper is designed to do. 
As we've already mentioned, the PyReshaper is designed to convert a set -of NetCDF files from time-slice (i.e., multiple time-dependent variables -with one time-value per file) format to time-series (one time-dependent -variable with multiple time-values per file) format, either in serial or -parallel. In serial, the PyReshaper will write each time-series variable -to its own file in sequence. In parallel, time-series variables will be +of NetCDF files from time-slice (i.e., synoptic or history-file) format +to time-series (or single-field) format, either in serial or parallel. +Time-slice files contain all of the model variables in one file, but typically +only span 1 or a few time-steps per file. Time-series files nominally contain +only 1 single time-dependent variable spanning many time-steps, but they +can additionally contain metadata used to describe the single-field variable +contained by the file. + +In serial, the PyReshaper will write each time-series variable to its own +file in sequence. In parallel, time-series variables will be written simultaneously across the MPI processes allocated for the job. There are a number of assumptions that the PyReshaper makes regarding the @@ -192,20 +154,24 @@ time-slice (input) data, which we list below. overlap with each other. (That is, each time-slice NetCDF file can contain data spanning a number of simulation time steps. However, the span of time contained in one time slice cannot overlap the span of - time in another time-slice.) + time in another time-slice.) If the time-slices overlap, an error + will be given and execution will stop. 3. Every time-slice NetCDF file contains the same time-dependent variables, just at differing times. Similarly, there are a number of assumptions made about the time-series -(output) data produced by the PyReshaper conversion process. - -1. By default, every time-dependent variable will be written to its own - time-series NetCDF file. -2. 
Any time-dependent variables that should be included in every - time-series file (e.g., such as ``time`` itself), instead of getting - their own time-series file, must be specified by name. -3. Every time-independent variable that appears in the time-slice files - will be written to every time-series file. +(output) data produced by the PyReshaper conversion process. The variables +written to the output data can be time-series variables or metadata +variables. Time-series variables are written to one output file only. +Metadata variables are written to all output files. + +1. By default, every time-dependent variable will be assumed to be a + time-series variable (i.e., written to its own time-series NetCDF file). +2. Every time-independent variable that appears in the time-slice files + will be assumed to be a metadata variable (i.e., written to every + time-series file). +3. Users can explicitly specify any number of time-dependent variables + as metadata variables (e.g., such as ``time`` itself). 4. Every time-series file written by the PyReshaper will span the total range of time spanned by all time-slice files specified. 5. Every time-series file will be named with the same prefix and suffix, @@ -213,7 +179,7 @@ Similarly, there are a number of assumptions made about the time-series time\_series\_filename = prefix + variable\_name + suffix -where the variable\_name is the name of the time-dependent variable +where the variable\_name is the name of the time-series variable associated with that time-series file. It is important to understand the implications of the last assumption on @@ -223,11 +189,197 @@ file-name to contain information that pertains to the time-sampling frequency of the data in the file, or the range of time spanned by the time-series file, or any number of other things. 
To conform to such naming conventions, it may be required that the total set of time-slice -files that the user which to convert to time-series be given to the -PyReshaper in multiple subsets, or chunks. Throughout this manual, we -will refer to such "chunks" as streams. As such, every single PyReshaper +files that the user wishes to convert to time-series be given to the +PyReshaper in multiple subsets, running the PyReshaper independently on +each subset of time-slice files. Throughout this manual, we +will refer to such "subsets" as streams. As such, every single PyReshaper operation is designed to act on a single stream. +Using the PyReshaper from the Unix Command-Line +----------------------------------------------- + +While the most flexible way of using the PyReshaper is from within +Python, as described above, the easiest way to use the PyReshaper is usually +to run the PyReshaper command-line utilities. In this section, we describe +how to use the command-line utilities ``s2smake`` and ``s2srun``, which +provide command-line interfaces (CLI) to the PyReshaper. (These scripts +will be installed in the ``$PREFIX/bin`` directory, where ``PREFIX`` is the +installation root directory. If you installed PyReshaper with the ``--user`` +flag, you may need to add this directpry to your path.) + +The ``s2smake`` utility is designed to generate a Specifier object file +(*specfile*) that contains a specification of the PyReshaper job. +The ``s2srun`` utility is then used to run the PyReshaper with the newly +generated *specfile*. + +Below is an example of how to use the PyReshaper's ``s2smake`` utility, +with all options and parameters specified on the command line. + +:: + + $ s2smake \ + --netcdf_format="netcdf4" \ + --compression_level=1 \ + --output_prefix="/path/to/outfile_prefix." 
\ + --output_suffix=".000101-001012.nc" \ + -m "time" -m "time_bounds" \ + --specfile=example.s2s \ + /path/to/infiles/*.nc + +In this example, you will note that we have specified each +time-dependent metadata variable name with its own ``-m`` option. (In +this case, there are only 2, ``time`` and ``time_bounds``.) We have also +specified the list of input (time-slice) files using a wildcard, which +the Unix shell fills in with a list of all filenames that match this *glob* +*pattern*. In this case, we are specifying all files with the ``.nc`` file +extension in the directory ``/path/to/infiles``. These command-line options +and arguments specify all of the same input needed to run the PyReshaper. +Running this command will save this PyReshaper *specfile* in a file called +``example.s2s``. + +When using *glob patterns*, it is important to understand that the *shell* +expands these glob patterns out into the full list of matching filenames +*before* running the ``s2smake`` command. On many systems, the length of +a shell command is limited to a fixed number of characters, and it is possible +for the *glob pattern* to expand to a length that makes the command too long +for the shell to execute! If this is the case, you may enclose your glob +pattern in quotation marks (i.e., ``"/path/to/infiles/*.nc"`` instead of +``/path/to/infiles/*.nc``). The ``s2smake`` command will then expand the +glob pattern internally, allowing you to avoid the command-line character +limit of the system. + +With the *specfile* created and saved using the ``s2smake`` utility, +we can run the PyReshaper with this *specfile* using the ``s2srun`` utility, +with all options and parameters specified on the command line. + +:: + + $ s2srun --serial --verbosity=2 example.s2s + +The example above shows the execution, in serial, of the PyReshaper job +specified by the ``example.s2s`` *specfile* with a verbosity +level of 2. 
+ +For parallel operation, one must launch the ``s2srun`` script from +the appropriate MPI launcher. On the NCAR Yellowstone system +(``yellowstone.ucar.edu``), for example, this is done with the following +command. + +:: + + $ mpirun.lsf s2srun --verbosity=3 example.s2s + +In the above example, this will launch the ``s2srun`` script into +the MPI environment already created by either a request for an +interactive session or from an LSF submission script. + +Arguments to the ``s2smake`` Script +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The arguments to the ``s2smake`` utility are as follows. + +- ``--backend BACKEND`` (``-b BACKEND``): I/O backend to be used when + reading or writing from NetCDF files. The parameter ``BACKEND`` can be one + of ``'Nio'`` or ``'netCDF4'``, indicating PyNIO or netCDF4-python, respectively. + The default value is ``'netCDF4'``. + +- ``--compression_level C`` (``-c C``): NetCDF compression level, when using the + netcdf4 file format, where ``C`` is an integer between 0 and 9, with 0 indicating + no compression at all and 9 indicating the highest level of compression. The + default compression level is 1. + +- ``--netcdf_format NCFORMAT`` (``-f NCFORMAT``): NetCDF file format to be used + for all output files, where ``NCFORMAT`` can be ``'netcdf'``, ``'netcdf4'``, or + ``'netcdf4c'``, indicating NetCDF3 Classic format, NetCDF4 Classic format, or + NetCDF4 Classic format with forced compression level 1. The default file format + is ``'netcdf4'``. + +- ``--metadata VNAME`` (``-m VNAME``): Indicate that the variable ``VNAME`` should + be treated as metadata, and written to all output files. There may be more than + one ``--metadata`` (or ``-m``) option given, each one being added to a list. + +- ``--meta1d`` (``-1``): This flag forces all 1D time-variant variables to be treated + as metadata. These variables need not be added explicitly to the list of metadata + variables (i.e., with the ``--metadata`` or ``-m`` argument). 
These variables will + be added to the list when the PyReshaper runs. + +- ``--specfile SPECFILE`` (``-o SPECFILE``): The name of the *specfile* to write, + containing the specification of the PyReshaper job. The default *specfile* name + is ``'input.s2s'``. + +- ``--output_prefix PREFIX`` (``-p PREFIX``): A string specifying the prefix to be + given to all output filenames. The output file will be named according to the + rule: + + ``output_prefix + variable_name + output_suffix`` + + The default output filename prefix is ``'tseries.'``. + +- ``--output_suffix SUFFIX`` (``-s SUFFIX``): A string specifying the suffix to be + given to all output filenames. The output file will be named according to the + rule: + + ``output_prefix + variable_name + output_suffix`` + + The default output filename suffix is ``'.nc'``. + +- ``--time_series VNAME``: Indicate that only the named ``VNAME`` variables should + be treated as time-series variables and extracted into their own time-series files. + This option works like the ``--metadata`` option, in that multiple occurrences of + this option can be used to extract out only the time-series variables given. If + any variable names are given to both the ``--metadata`` and ``--time_series`` + options, then the variable will be treated as metadata. If the ``--time_series`` + option is *not* used, then all time-dependent variables that are not specified to + be metadata (i.e., with the ``--metadata`` option) will be treated as time-series + variables and given their own output file. **NOTE: If you use this option, data + can be left untransformed from time-slice to time-series output! DO NOT DELETE + YOUR OLD TIME-SLICE FILES!** + +Each input file should be listed in sequence, space separated, on the command line to +the utility, nominally after all other options have been specified. 
+ + +Arguments to the ``s2srun`` Script +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +While the basic options shown in the previous examples above are +sufficient for most purposes, additional options are available. + +- ``--chunk NAME,SIZE`` (``-c NAME,SIZE``): This command-line option can be used + to specify a maximum read/write chunk-size ``SIZE`` along a given named dimension + ``NAME``. Multiple ``--chunk`` options can be given to specify chunk-sizes along + multiple dimensions. This option determines the size of the data "chunk" read + from a single input file (and then written to an output file). If the chunk-size + is greater than the given dimension size, then the entire dimension will be read + at once. If the chunk-size is less than the given dimension size, then all variables + that depend on that dimension will be read in multiple parts, each "chunk" being written + before the next is read. This can be important to control memory use. By default, + chunking is done over the unlimited dimension with a chunk-size of 1. + +- ``--limit L`` (``-l L``): This command-line option can be used to set the + ``output_limit`` argument of the PyReshaper ``convert()`` function, + described below. This can be used when testing to only output the first ``L`` + files. The default value is 0, which indicates no limit (normal operation). + +- ``--write_mode M`` (``-m M``): This command-line option can be used to set + the ``wmode`` output file write-mode parameter of the ``create_reshaper()`` + function, described below. The default write mode is ``'w'``, which indicates + normal writing, which will error if the output files already exist (i.e., + no overwriting). Other options are ``'o'`` to overwrite existing output files, + ``'s'`` to skip existing output files, ``'a'`` to append to existing output + files. + +- ``--serial`` (``-s``): If this flag is used, it will run the PyReshaper in + serial mode. By default, it will run PyReshaper in parallel mode. 
+ +- ``--verbosity V`` (``-v V``): Sets the verbosity level for standard output + from the PyReshaper. A level of 0 means no output, and a value of 1 or more + means increasingly more output. The default verbosity level is 1. + +Nominally, the last argument given to the ``s2srun`` utility should be the name +of the *specfile* to run. + + Using the PyReshaper from within Python --------------------------------------- @@ -238,18 +390,15 @@ external third-party library. The library API for the PyReshaper is designed to be simple and light-weight, making it easy to use in your own Python tools or scripts. -Single-Stream Usage -~~~~~~~~~~~~~~~~~~~ - Below, we show an example of how to use the PyReshaper from within -Python to convert a single stream from time-slice format to time-series +Python to convert a stream from time-slice format to time-series format. .. code:: py from pyreshaper import specification, reshaper - # Create a Specifier object (that defined a single stream to be converted + # Create a Specifier object specifier = specification.create_specifier() # Specify the input needed to perform the PyReshaper conversion @@ -258,7 +407,7 @@ format. specifier.compression_level = 1 specifier.output_file_prefix = "/path/to/outfile_prefix." specifier.output_file_suffix = ".000101-001012.nc" - specifier.time_variant_metadata = ["time", "time_bounds", ...] + specifier.time_variant_metadata = ["time", "time_bounds"] # Create the PyReshaper object rshpr = reshaper.create_reshaper(specifier, @@ -279,7 +428,7 @@ which is defined in the specification module). We will describe each attribute of the Specifier object below. 
Specifier Object Attributes -^^^^^^^^^^^^^^^^^^^^^^^^^^^ +~~~~~~~~~~~~~~~~~~~~~~~~~~~ - ``input_file_list``: This specifies a list of input (time-slice) file paths that all conform to the input file assumptions (described @@ -329,14 +478,62 @@ can include the full, absolute path information for the output time-invariant (time-independent) variables will be treat as metadata automatically. -Even though the PyReshaper is designed to work on a single stream at a -time, multiple streams can be defined as input to the PyReshaper. When -running the PyReshaper with multiple stream, multiple Specifier objects -must be created, one for each stream. See the section on -Multiple Stream Usage. +- ``assume_1d_time_variant_metadata``: If set to ``True``, this indicates + that all 1D time-variant variables (i.e., variables that *only* depend + upon ``time``) should be added to the list of ``time_variant_metadata`` + when the Reshaper runs. The default for this option is ``False``. + +- ``time_series``: If set to a list of string variable names, only these + variable names will be transformed into time-series format. This is + equivalent to the ``--time_series`` option to the ``s2smake`` utility. + **NOTE: Setting this attribute can leave data untransformed from time-slice + to time-series format! DO NOT DELETE YOUR OLD TIME-SLICE FILES!** + +- ``backend``: This specifies which I/O backend to use for reading + and writing NetCDF files. The default backend is ``'netCDF4'``, but + the user can alternatively specify ``'Nio'`` to use PyNIO. + +Specifier Object Methods +~~~~~~~~~~~~~~~~~~~~~~~~ + +In addition to the attributes above, the Specifier objects have some useful +methods that can be called. + +- ``validate()``: Calling this function validates the attributes of the + Specifier, making sure their types and values appear correct. +- ``write(filename)``: Calling this function with the argument ``filename`` + will write the *specfile* matching the Specifier. 
+ + +Specfiles +~~~~~~~~~ + +*Specfiles* are simply *pickled* Specifier objects written to a file. To +create a *specfile*, one can simply call the Specifier's ``write()`` method, +described above, or one can explicitly *pickle* the Specifier directly, as +shown below. + +.. code:: py + + import pickle + + # Assume "spec" is an existing Specifier instance + pickle.dump(spec, open("specfile.s2s", "wb")) + +This is equivalent to the call ``spec.write('specfile.s2s')``. + +A *specfile* can be read with the following Python code. + +.. code:: py + + import pickle + + spec = pickle.load( open("specfile.s2s", "rb") ) + + Arguments to the ``create_reshaper()`` Function -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In the example above, the PyReshaper object (rshpr) is created by passing the single Specifier instance to the *factory* function @@ -398,191 +595,29 @@ takes the following parameters. may create their own ``SimpleComm`` object and force the PyReshaper to use it by setting this option equal to the user-created ``SimpleComm`` instance. -Arguments to the ``convert()`` Function -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -While not shown in the above examples, there is an argument to the -``convert()`` function of the PyReshaper object called ``output_limit``. -This argument sets an integer limit on the number of time-series files -generated during the ``convert()`` operation (per MPI process). This can -be useful for debugging purposes, as it can greatly reduce the length of -time consumed in the ``convert()`` function. A value of ``0`` indicates -no limit, or all output files will be generated. - -Multiple Stream Usage -~~~~~~~~~~~~~~~~~~~~~ - -In the example below, we show one way to define a multiple stream -PyReshaper run. - -.. 
code:: py - - from pyreshaper import specification, reshaper - - # Assuming all data defining each stream is contained - # in a list called "streams" - specifiers = {} - for stream in streams: - specifier = specification.create_specifier() - - # Define the Pyreshaper input for this stream - specifier.input_file_list = stream.input_file_list - specifier.netcdf_format = stream.netcdf_format - specifier.compression_level = stream.compression_level - specifier.output_file_prefix = stream.output_file_prefix - specifier.output_file_suffix = stream.output_file_suffix - specifier.time_variant_metadata = stream.time_variant_metadata - - # Append this Specifier to the dictionary of specifiers - specifiers[stream.name] = specifier - - # Create the PyReshaper object - rshpr = reshaper.create_reshaper(specifiers, serial=False, verbosity=1) - - # Run the conversion (slice-to-series) process - rshpr.convert() - - # Print timing diagnostics - rshpr.print_diagnostics() - -In the above example, we assume the properly formatted data (like the -data shown in the single-stream example above) is contained in the list -called ``streams``. In addition to the data needed by each Specifier -(i.e., the data defining each ``stream`` instance), this example assumes that -a name has been given to each stream, contained in the attribute -``stream.name``. Each Specifier is then contained in a dictionary with keys -corresponding to the stream name and values corresponding to the stream -Specifier. This name will be used when printing diagnostic information during -the ``convert()`` and ``print_diagnostics()`` operations of the PyReshaper. - -Alternatively, the specifiers object (in the above example) can be a -Python list, instead of a Python dictionary. If this is the case, the -list of Specifier objects will be converted to a dictionary, with the -keys of the dictionary corresponding to the list index (i.e., an -integer). 
- -It is important to note that when running multiple streams through one -PyReshaper, however, load-balancing may not be ideal. Some streams may only -have a handful of time-series variables, while other streams may have a -large number of time-series variables. Since the PyReshaper parallelizes over -time-series variables, this means that the ideal number of MPI processes for -best performance of one stream may be very different than for another. Hence, -running multiple streams through one PyReshaper can lead to either a large number -of MPI processes sitting idle (with no time-series variables to write) or -not enough MPI processes to achieve optimal speed. - -Using the PyReshaper from the Unix Command-Line ------------------------------------------------ - -While the most flexible way of using the PyReshaper is from within -Python, as described above, it is also possible to run the PyReshaper -from the command-line. In this section, we describe how to use the -Python scripts ``s2smake`` and ``s2srun``, which provide command-line -interfaces (CLI) to the PyReshaper. (These scripts will be installed in the -``$PREFIX/bin`` directory, where ``PREFIX`` is the installation root -directory.) - -The ``s2smake`` utility is designed to generate a Specifier object file -(*specfile*) that contains a Specifier that can be used in a PyReshaper run. -The ``s2srun`` utility is then used to run the PyReshaper with the newly -generated Specifier. The *specfile* is a convenient way of saving Specifier -information for future use or reference. - -Below is an example of how to use the PyReshaper's ``s2smake`` utility, -with all options and parameters specified on the command line. - -:: - - $ s2smake \ - --netcdf_format="netcdf4" \ - --compression_level=1 \ - --output_prefix="/path/to/outfile_prefix." 
\ - --output_suffix=".000101-001012.nc" \ - -m "time" -m "time_bounds" \ - --specfile=example.s2s \ - /path/to/infiles/*.nc - -In this example, you will note that we have specified each -time-dependent metadata variable name with its own ``-m`` option. (In -this case, there are only 2, ``time`` and ``time_bounds``.) We have also -specified the list of input (time-slice) files using a wildcard, which -the Unix shell fills in with a list of all filenames that match this *glob* -*pattern*. In this case, we are specifying all files with the ``.nc`` file -extension in the directory ``/path/to/infiles``. These command-line options -and arguments specify all of the same input passed to the Specifier objects -in the examples of the previous section. This script will create a -Specifier object with the options passed via the command line, and it will -save this Specifier object in *specfile* called ``example.s2s``. - -When using *glob patterns*, it is important to understand that the *shell* -expands these glob patterns out into the full list of matching filenames -*before* running the ``s2smake`` command. On many systems, the length of -a shell command is limited to a fixed number of characters, and it is possible -for the *glob pattern* to expand to a length that makes the command too long -for the shell to execute! If this is the case, you may contain your glob -pattern in quotation marks (i.e., ``"/path/to/infiles/*.nc"`` instead of -``/path/to/infiles/*.nc``). The ``s2smake`` command will then expand the -glob pattern internally, allowing you to avoid the command-line character -limit of the system. - -With the Specifier created and saved to file using the ``s2smake`` utility, -we can run the PyReshaper with this Specifier using the ``s2srun`` utility, -with all options and parameters specified on the command line. 
- -:: - - $ s2srun --serial --verbosity=2 example.s2s - -The example above shows the execution, in serial, of the PyReshaper job -specified by the ``example.s2s`` Specifier object file with a verbosity -level of 2. - -For parallel operation, one must launch the ``s2srun`` script from -the appropriate MPI launcher. On the NCAR Yellowstone system -(``yellowstone.ucar.edu``), for example, this is done with the following -command. - -:: - - $ mpirun.lsf s2srun --verbosity=3 example.s2s - -In the above example, this will launch the ``s2srun`` script into -the MPI environment already created by either a request for an -interactive session or from an LSF submission script. - -The Specifier object files, or *specfiles*, described above can be generated -from within Python, too. These files are serialized instances of Specifier -objects, saved to a file. The serializing tool assumed is Python's ``pickle`` -library. To generate your own *specfile* from within Python, do the following. -.. code:: py - - import pickle - - # Assume "spec" is an existing Specifier instance - pickle.dump(spec, open("specfile.s2s", "wb")) - -Similarly, a *specfile* can be read with the following Python code. - -.. code:: py - - import pickle - - spec = pickle.load( open("specfile.s2s", "rb") ) - -Additional Arguments to the ``s2srun`` Script -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -While the basic options shown in the previous examples above are -sufficient for most purposes, two a options are available. - -- ``--limit``: This command-line option can be used to set the - ``output_limit`` argument of the PyReshaper ``convert()`` function, - described previously. +Arguments to the ``convert()`` Function +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +While not shown in the above examples, there are named arguments that can +be passed to the ``convert()`` function of the Reshaper object. 
+ +- ``output_limit``: This argument sets an integer limit on the number of + time-series files generated during the ``convert()`` operation (per MPI process). + This can be useful for debugging purposes, as it can greatly reduce the length + of time consumed in the ``convert()`` function. A value of ``0`` indicates + no limit, or all output files will be generated. + +- ``chunks``: This argument sets a dictionary of dimension names to chunk-sizes. + This is equivalent to the ``--chunk`` command-line option to ``s2srun``. This option + determines the size of the data "chunk" read from a single input file (and then written + to an output file) along each given dimension. If a chunk-size is greater than the given + dimension size, then the entire dimension will be read at once. If a chunk-size is less + than the given dimension size, then all variables that depend on that dimension will be + read in multiple parts, each "chunk" being written before the next is read. This can be + important to control memory use. By default, the ``chunks`` parameter is equal to + ``None``, which means chunking is done over the unlimited dimension with a chunk-size of 1. -- ``--write_mode``: This command-line option can be used to set - the ``wmode`` output file write mode parameter of the ``create_reshaper()`` - function, described previously. Obtaining Best Performance with the PyReshaper ---------------------------------------------- diff --git a/docs/source/pyreshaper.rst b/docs/source/pyreshaper.rst index 11378a60..dd536a51 100644 --- a/docs/source/pyreshaper.rst +++ b/docs/source/pyreshaper.rst @@ -14,4 +14,5 @@ Submodules specification reshaper + iobackend diff --git a/scripts/s2smake b/scripts/s2smake index a1e600e6..3f9bde01 100755 --- a/scripts/s2smake +++ b/scripts/s2smake @@ -6,7 +6,7 @@ This script is used to generate Specifier object files ("specfiles") that can then be run directly from the command-line with the corresponding "runs2sspec" tool. 
-Copyright 2015, University Corporation for Atmospheric Research +Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.rst file for details """ @@ -27,6 +27,12 @@ def cli(argv=None): options and write the Specifier to a named file.""" parser = optparse.OptionParser(prog='s2smake', description=desc) + parser.add_option('-1', '--meta1d', default=False, action='store_true', + help=("Treat all 1D time-variant variables as metadata " + "variables")) + parser.add_option('-b', '--backend', default='netCDF4', type='string', + help=("I/O backend to be used when reading or writing " + "from NetCDF files ('Nio' or 'netCDF4')")) parser.add_option('-c', '--compression_level', type='int', default=1, help=('NetCDF compression level, when using the ' 'netcdf4 format. [Default: 1]')) @@ -51,6 +57,13 @@ def cli(argv=None): 'file will be named according to the rule: ' 'output_prefix + variable_name + output_suffix ' '[Default: ".nc"]')) + parser.add_option('--time_series', action='append', default=None, + help=('Names of variables to be given their own output file. ' + 'There may be more than one --time_series option given, and ' + 'each name will be appended to a list. If the --time_series ' + 'option is not used, then all time-variant variables that ' + 'are not labelled as metadata will be given their own ' + 'output file. 
[Default: None]')) opts, args = parser.parse_args(argv) # Check that input files are given @@ -67,6 +80,11 @@ def cli(argv=None): raise ValueError(("Unacceptable NetCDF format " "{0}".format(opts.netcdf_format))) + # Check the I/O backend name + if opts.backend not in ['Nio', 'netCDF4']: + raise ValueError(("Unacceptable NetCDF backend name " + "{0}".format(opts.backend))) + return opts, args #============================================================================== @@ -85,11 +103,14 @@ def main(argv=None): # Add input to the specifier spec.input_file_list = full_input_file_list + spec.io_backend = opts.backend spec.compression_level = opts.compression_level spec.netcdf_format = opts.netcdf_format spec.output_file_prefix = opts.output_prefix spec.output_file_suffix = opts.output_suffix + spec.time_series = opts.time_series spec.time_variant_metadata = opts.metadata + spec.assume_1d_time_variant_metadata = opts.meta1d # Validate before saving spec.validate() diff --git a/scripts/s2srun b/scripts/s2srun index 9136d91a..e0cbb744 100755 --- a/scripts/s2srun +++ b/scripts/s2srun @@ -6,7 +6,7 @@ This script is designed to run a specfile (i.e., a Pickled Specifier object). The specfile itself should be constructed from a hand-written Python script, or from the makes2sspec tool that accompanies this script. -Copyright 2015, University Corporation for Atmospheric Research +Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.rst file for details """ @@ -32,6 +32,13 @@ def cli(argv=None): action='store_true', dest='once', help=('Whether to write a "once" file with all ' 'metadata. [Default: False]')) + parser.add_option('-c', '--chunk', default=None, action='append', dest='chunks', + help=('Chunk size for a named dimension. This should ' + 'be given as a comma-separated pair (e.g., NAME,SIZE) ' + 'indicating the name of the dimension to chunk over ' + 'and the chunk size. 
Multiple chunk options can be ' + 'given on the command line, each one enabling chunking ' + 'over a new dimension. [Default: None]')) parser.add_option('-l', '--limit', default=0, type='int', help=('The limit on the number of time-series files per ' 'processor to write. Useful when debugging. A ' @@ -60,6 +67,9 @@ def cli(argv=None): else: specfile = args[0] + if opts.chunks is not None: + opts.chunks = dict((c.split(',')[0],int(c.split(',')[1])) for c in opts.chunks) + return opts, specfile @@ -85,7 +95,7 @@ def main(argv=None): once=opts.once) # Run the conversion (slice-to-series) process - reshpr.convert(output_limit=opts.limit) + reshpr.convert(output_limit=opts.limit, chunks=opts.chunks) # Print timing diagnostics reshpr.print_diagnostics() diff --git a/setup.py b/setup.py index 19f309f3..34fee70c 100755 --- a/setup.py +++ b/setup.py @@ -10,23 +10,14 @@ exec(open('source/pyreshaper/version.py').read()) -try: - import Nio -except ImportError: - raise ImportError('PyNIO 1.4+ is required to install PyReshaper.') - -if Nio.__version__ < 1.4: - raise ImportError('PyNIO 1.4+ is required to install PyReshaper.') - - setup(name='PyReshaper', version=__version__, description='Python Time-Slice to Time-Series NetCDF Converter', author='Kevin Paul', author_email='kpaul@ucar.edu', - url='https://github.com/NCAR-CISL-ASAP/PyReshaper', - download_url='https://github.com/NCAR-CISL-ASAP/PyReshaper/tarball/v' + __version__, - license='https://github.com/NCAR-CISL-ASAP/PyReshaper/blob/master/LICENSE.rst', + url='https://github.com/NCAR/PyReshaper', + download_url='https://github.com/NCAR/PyReshaper/tarball/v' + __version__, + license='https://github.com/NCAR/PyReshaper/blob/master/LICENSE.rst', packages=['pyreshaper'], package_dir={'pyreshaper': 'source/pyreshaper'}, package_data={'pyreshaper': ['LICENSE.rst']}, diff --git a/source/pyreshaper/iobackend.py b/source/pyreshaper/iobackend.py new file mode 100644 index 
00000000..00b84b33 --- /dev/null +++ b/source/pyreshaper/iobackend.py @@ -0,0 +1,359 @@ +""" +The module containing the PyReshaper configuration specification class + +This is a configuration specification class, through which the input to +the PyReshaper code is specified. Currently all types of supported +operations for the PyReshaper are specified with derived dypes of the +Specification class. + +Copyright 2016, University Corporation for Atmospheric Research +See the LICENSE.rst file for details +""" + +import numpy + +try: + _dict_ = __import__('collections', fromlist=['OrderedDict']).OrderedDict +except: + try: + _dict_ = __import__('ordereddict', fromlist=['OrderedDict']).OrderedDict + except: + _dict_ = dict + + +_AVAILABLE_ = [] +_BACKEND_MAP_ = {} + +_BACKEND_ = None +_IOLIB_ = None + +# FIRST PREFERENCE +try: + _NC4_ = __import__('netCDF4') +except: + _NC4_ = None +if _NC4_ is not None: + _AVAILABLE_.append('netCDF4') + _BACKEND_MAP_['netCDF4'] = _NC4_ + if hasattr(_NC4_, '._netCDF4'): + _NC4_VAR_ = _NC4_._netCDF4.Variable + else: + _NC4_VAR_ = _NC4_.Variable + +# SECOND PREFERENCE +try: + _NIO_ = __import__('Nio') +except: + _NIO_ = None +if _NIO_ is not None: + _AVAILABLE_.append('Nio') + _BACKEND_MAP_['Nio'] = _NIO_ + + +#=============================================================================== +# is_available +#=============================================================================== +def is_available(name=None): + if name is None: + return len(_AVAILABLE_) > 0 + else: + return name in _BACKEND_MAP_ + + +#=============================================================================== +# set_backend - Set the backend to the one named or first preferred +#=============================================================================== +def set_backend(name=None): + global _BACKEND_ + global _IOLIB_ + if name is None: + if is_available(): + _BACKEND_ = _AVAILABLE_[0] + _IOLIB_ = _BACKEND_MAP_[_BACKEND_] + else: + raise RuntimeError('No I/O 
Backends available') + else: + if is_available(name): + _BACKEND_ = name + _IOLIB_ = _BACKEND_MAP_[name] + else: + raise KeyError('I/O Backend {0!r} not available'.format(name)) + +# Set Default backend +set_backend() + + +#=============================================================================== +# get_backend - Get the currently set backend name +#=============================================================================== +def get_backend(): + return _BACKEND_ + + +#=============================================================================== +# NCFile +#=============================================================================== +class NCFile(object): + """ + Wrapper class for netCDF files/datasets + """ + + def __init__(self, filename, mode='r', ncfmt='netcdf4', compression=0): + """ + Initializer + + Parameters: + filename (str): Name of netCDF file to open + mode (str): Write-mode ('r' for read, 'w' for write, 'a' for append) + ncfmt (str): Format to use of the netcdf file, if being created + ('netcdf' or 'netcdf4') + compression (int): Level of compression to use when writing to this + netcdf file + """ + if not isinstance(filename, (str, unicode)): + err_msg = "Netcdf filename must be a string" + raise TypeError(err_msg) + if not isinstance(mode, (str, unicode)): + err_msg = "Netcdf write mode must be a string" + raise TypeError(err_msg) + if not isinstance(ncfmt, (str, unicode)): + err_msg = "Netcdf file format must be a string" + raise TypeError(err_msg) + if not isinstance(compression, int): + err_msg = "Netcdf file compression must be an integer" + raise TypeError(err_msg) + + if mode not in ['r', 'w', 'a']: + err_msg = ("Netcdf write mode {0!r} is not one of " + "'r', 'w', or 'a'").format(mode) + raise ValueError(err_msg) + if ncfmt not in ['netcdf', 'netcdf4', 'netcdf4c']: + err_msg = ("Netcdf format {0!r} is not one of " + "'netcdf', 'netcdf4', or 'netcdf4c'").format(mode) + raise ValueError(err_msg) + if compression > 9 or compression 
< 0: + err_msg = ("Netcdf compression level {0} is not in range " + "0 to 9").format(compression) + raise ValueError(err_msg) + + self._mode = mode + self._backend = get_backend() + self._iolib = _IOLIB_ + + self._file_opts = {} + self._var_opts = {} + + if self._backend == 'Nio': + file_options = _IOLIB_.options() + file_options.PreFill = False + if ncfmt == 'netcdf': + file_options.Format = 'Classic' + elif ncfmt == 'netcdf4': + file_options.Format = 'NetCDF4Classic' + file_options.CompressionLevel = compression + elif ncfmt == 'netcdf4c': + file_options.Format = 'NetCDF4Classic' + file_options.CompressionLevel = 1 + self._file_opts = {"options": file_options} + + if mode == 'r': + self._obj = self._iolib.open_file(filename) + else: + self._obj = self._iolib.open_file(filename, mode, + **self._file_opts) + + elif self._backend == 'netCDF4': + if ncfmt == 'netcdf': + self._file_opts["format"] = "NETCDF3_64BIT" + elif ncfmt == 'netcdf4': + self._file_opts["format"] = "NETCDF4_CLASSIC" + if compression > 0: + self._var_opts["zlib"] = True + self._var_opts["complevel"] = int(compression) + elif ncfmt == 'netcdf4c': + self._file_opts["format"] = "NETCDF4_CLASSIC" + self._var_opts["zlib"] = True + self._var_opts["complevel"] = 1 + + if mode == 'r': + self._obj = self._iolib.Dataset(filename) + else: + self._obj = self._iolib.Dataset(filename, mode, + **self._file_opts) + + @property + def dimensions(self): + """ + Return the dimension sizes dictionary + """ + if self._backend == 'Nio': + return self._obj.dimensions + elif self._backend == 'netCDF4': + return _dict_((n, len(d)) for n, d + in self._obj.dimensions.iteritems()) + else: + return _dict_() + + def unlimited(self, name): + """ + Return whether the dimension named is unlimited + + Parameters: + name (str): Name of dimension + """ + if self._backend == 'Nio': + return self._obj.unlimited(name) + elif self._backend == 'netCDF4': + return self._obj.dimensions[name].isunlimited() + + @property + def ncattrs(self): 
+ if self._backend == 'Nio': + return self._obj.attributes.keys() + elif self._backend == 'netCDF4': + return self._obj.ncattrs() + + def getncattr(self, name): + if self._backend == 'Nio': + return self._obj.attributes[name] + elif self._backend == 'netCDF4': + return self._obj.getncattr(name) + + def setncattr(self, name, value): + if self._mode == 'r': + raise RuntimeError('Cannot set attribute in read mode') + if self._backend == 'Nio': + setattr(self._obj, name, value) + elif self._backend == 'netCDF4': + self._obj.setncattr(name, value) + + @property + def variables(self): + return _dict_((n, NCVariable(v, self._mode)) for n, v + in self._obj.variables.iteritems()) + + def create_dimension(self, name, value=None): + if self._mode == 'r': + raise RuntimeError('Cannot create dimension in read mode') + if self._backend == 'Nio': + self._obj.create_dimension(name, value) + elif self._backend == 'netCDF4': + self._obj.createDimension(name, value) + + def create_variable(self, name, datatype, dimensions): + if self._mode == 'r': + raise RuntimeError('Cannot create variable in read mode') + if self._backend == 'Nio': + dt = numpy.dtype(datatype) + if dt.char == 'S': + typecode = 'c' + else: + typecode = dt.char + var = self._obj.create_variable(name, typecode, dimensions) + elif self._backend == 'netCDF4': + var = self._obj.createVariable(name, datatype, dimensions, + **self._var_opts) + return NCVariable(var, self._mode) + + def close(self): + self._obj.close() + + +#=============================================================================== +# NCVariable +#=============================================================================== +class NCVariable(object): + """ + Wrapper class for NetCDF variables + """ + + def __init__(self, vobj, mode='r'): + self._mode = mode + self._obj = vobj + if _NC4_ is not None and isinstance(vobj, _NC4_VAR_): + self._backend = 'netCDF4' + self._iolib = _NC4_ + elif _NIO_ is not None: + self._backend = 'Nio' + self._iolib = 
_NIO_ + else: + self._backend = None + self._iolib = None + + @property + def ncattrs(self): + if self._backend == 'Nio': + return self._obj.attributes.keys() + elif self._backend == 'netCDF4': + return self._obj.ncattrs() + + def getncattr(self, name): + if self._backend == 'Nio': + return self._obj.attributes[name] + elif self._backend == 'netCDF4': + return self._obj.getncattr(name) + + def setncattr(self, name, value): + if self._mode == 'r': + raise RuntimeError('Cannot set attribute in read mode') + if self._backend == 'Nio': + setattr(self._obj, name, value) + elif self._backend == 'netCDF4': + self._obj.setncattr(name, value) + + @property + def dimensions(self): + return self._obj.dimensions + + @property + def shape(self): + return self._obj.shape + + @property + def size(self): + if self._backend == 'Nio': + return numpy.prod(self.shape) + elif self._backend == 'netCDF4': + return self._obj.size + + @property + def datatype(self): + if self._backend == 'Nio': + return numpy.dtype(self._obj.typecode()) + elif self._backend == 'netCDF4': + return self._obj.dtype + + def get_value(self): + if self._backend == 'Nio': + return self._obj.get_value() + elif self._backend == 'netCDF4': + if self._obj.shape == (): + return self._obj.getValue() + else: + return self._obj[...] 
+ + def assign_value(self, value): + if self._mode == 'r': + raise RuntimeError('Cannot assign value in read mode') + if self._backend == 'Nio': + self._obj.assign_value(value) + elif self._backend == 'netCDF4': + if self._obj.shape == (): + self._obj.assignValue(value) + else: + self._obj[:] = value + + def __getitem__(self, key): + return self._obj[key] + + def __setitem__(self, key, value): + if self._mode == 'r': + raise RuntimeError('Cannot set variable in read mode') + self._obj[key] = value + + +#=============================================================================== +# COMMAND-LINE OPERATION +#=============================================================================== +if __name__ == '__main__': + pass diff --git a/source/pyreshaper/reshaper.py b/source/pyreshaper/reshaper.py index ec44eaac..62d7427e 100644 --- a/source/pyreshaper/reshaper.py +++ b/source/pyreshaper/reshaper.py @@ -5,19 +5,17 @@ are defined. Currently, only one operation has been implemented (i.e., the time-slice to time-series operation). 
-Copyright 2015, University Corporation for Atmospheric Research +Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.rst file for details """ # Built-in imports -import abc +from sys import platform from os import linesep, remove, rename from os.path import exists, isfile # Third-party imports import numpy -from Nio import open_file as nio_open_file -from Nio import options as nio_options from asaptools.simplecomm import create_comm, SimpleComm from asaptools.timekeeper import TimeKeeper from asaptools.partition import WeightBalanced @@ -25,6 +23,23 @@ # PyReshaper imports from specification import Specifier +import iobackend + +# For memory diagnostics +from resource import getrusage, RUSAGE_SELF + + +#=================================================================================================== +# _memory_usage_resource_ +#=================================================================================================== +def _get_memory_usage_MB_(): + """ + Return the maximum memory use of this Python process in MB + """ + to_MB = 1024. 
+ if platform == 'darwin': + to_MB *= to_MB + return getrusage(RUSAGE_SELF).ru_maxrss / to_MB #============================================================================== @@ -65,30 +80,12 @@ def create_reshaper(specifier, serial=False, verbosity=1, wmode='w', """ # Determine the type of Reshaper object to instantiate if isinstance(specifier, Specifier): - return Slice2SeriesReshaper(specifier, - serial=serial, - verbosity=verbosity, - wmode=wmode, - once=once, - simplecomm=simplecomm) - elif isinstance(specifier, (list, tuple)): - spec_dict = dict([(str(i), s) for (i, s) in enumerate(specifier)]) - return create_reshaper(spec_dict, - serial=serial, - verbosity=verbosity, - wmode=wmode, - once=once, - simplecomm=simplecomm) - elif isinstance(specifier, dict): - if not all([isinstance(s, Specifier) for s in specifier.values()]): - err_msg = 'Multiple specifiers must all be of Specifier type' - raise TypeError(err_msg) - return MultiSpecReshaper(specifier, - serial=serial, - verbosity=verbosity, - wmode=wmode, - once=once, - simplecomm=simplecomm) + return Reshaper(specifier, + serial=serial, + verbosity=verbosity, + wmode=wmode, + once=once, + simplecomm=simplecomm) else: err_msg = 'Specifier of type ' + str(type(specifier)) + ' is not a ' \ + 'valid Specifier object.' @@ -155,36 +152,10 @@ def _pprint_dictionary(title, dictionary, order=None): #============================================================================== -# Reshaper Abstract Base Class +# Reshaper Base Class #============================================================================== class Reshaper(object): - """ - Abstract base class for Reshaper objects - """ - - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def convert(self): - """ - Method to perform the Reshaper's designated operation. 
- """ - return - - @abc.abstractmethod - def print_diagnostics(self): - """ - Print out timing and I/O information collected up to this point - """ - return - - -#============================================================================== -# Reshaper Class -#============================================================================== -class Slice2SeriesReshaper(Reshaper): - """ The time-slice to time-series Reshaper class @@ -258,6 +229,7 @@ def __init__(self, specifier, serial=False, verbosity=1, wmode='w', self._timer.start('Initializing Simple Communicator') if simplecomm is None: simplecomm = create_comm(serial=serial) + # Reference to the simple communicator self._simplecomm = simplecomm self._timer.stop('Initializing Simple Communicator') @@ -280,30 +252,45 @@ def __init__(self, specifier, serial=False, verbosity=1, wmode='w', if self._simplecomm.is_manager(): self._vprint(' Specifier validated', verbosity=1) + # The I/O backend to use + self._backend = specifier.io_backend + if iobackend.is_available(specifier.io_backend): + self._backend = specifier.io_backend + else: + self._backend = iobackend.get_backend() + self._vprint((' I/O Backend {0} not available. 
Using {1} ' + 'instead').format(specifier.io_backend, self._backend), + verbosity=1) + # Store the input file names self._input_filenames = specifier.input_file_list + # Store the time-series variable names + self._time_series_names = specifier.time_series + if self._time_series_names is not None: + vnames = ', '.join(self._time_series_names) + self._vprint('WARNING: Extracting only variables: {0}'.format(vnames), verbosity=-1) + # Store the list of metadata names self._metadata_names = specifier.time_variant_metadata + # Store whether to treat 1D time-variant variables as metadata + self._1d_metadata = specifier.assume_1d_time_variant_metadata + # Store the output file prefix and suffix self._output_prefix = specifier.output_file_prefix self._output_suffix = specifier.output_file_suffix - # Setup PyNIO options (including disabling the default PreFill option) - opt = nio_options() - opt.PreFill = False - - # Determine the Format and CompressionLevel options - # from the NetCDF format string in the Specifier - if specifier.netcdf_format == 'netcdf': - opt.Format = 'Classic' - elif specifier.netcdf_format in ['netcdf4', 'netcdf4c']: - opt.Format = 'NetCDF4Classic' - opt.CompressionLevel = specifier.compression_level - self._nio_options = opt + # Setup NetCDF file options + self._netcdf_format = specifier.netcdf_format + self._netcdf_compression = specifier.compression_level if self._simplecomm.is_manager(): - self._vprint(' PyNIO options set', verbosity=1) + self._vprint(' NetCDF I/O Backend: {0}'.format(self._backend), + verbosity=1) + self._vprint(' NetCDF Output Format: {0}'.format(self._netcdf_format), + verbosity=1) + self._vprint(' NetCDF Compression: {0}'.format(self._netcdf_compression), + verbosity=1) # Helpful debugging message if self._simplecomm.is_manager(): @@ -318,6 +305,8 @@ def _inspect_input_files(self): We check the file contents here. 
""" + # Set the I/O backend according to what is specified + iobackend.set_backend(self._backend) # Initialize the list of variable names for each category self._time_variant_metadata = [] @@ -329,7 +318,7 @@ def _inspect_input_files(self): #===== INSPECT FIRST INPUT FILE ===== # Open first file - ifile = nio_open_file(self._input_filenames[0]) + ifile = iobackend.NCFile(self._input_filenames[0]) # Look for the 'unlimited' dimension try: @@ -340,22 +329,22 @@ def _inspect_input_files(self): raise LookupError(err_msg) # Get the time values - time_values = [ifile.variables[self._unlimited_dim].get_value()] - - # Get the list of variable names and missing variables - var_names = set(ifile.variables.keys()) - missing_vars = set() + time_values = [ifile.variables[self._unlimited_dim][:]] # Categorize each variable (only looking at first file) for var_name, var in ifile.variables.iteritems(): if self._unlimited_dim not in var.dimensions: self._time_invariant_metadata.append(var_name) - elif var_name in self._metadata_names: + elif (var_name in self._metadata_names or + (self._1d_metadata and len(var.dimensions) == 1)): self._time_variant_metadata.append(var_name) - else: - size = numpy.dtype(var.typecode()).itemsize - size = size * numpy.prod(var.shape) - all_tsvars[var_name] = size + elif (self._time_series_names is None or + var_name in self._time_series_names): + all_tsvars[var_name] = var.datatype.itemsize * var.size + + # Get the list of variable names and missing variables + var_names = set(all_tsvars.keys() + self._time_invariant_metadata + self._time_variant_metadata) + missing_vars = set() # Close the first file ifile.close() @@ -372,7 +361,7 @@ def _inspect_input_files(self): # (4) Check if there are any missing variables # (5) Get the time values from the files for ifilename in self._input_filenames[1:]: - ifile = nio_open_file(ifilename) + ifile = iobackend.NCFile(ifilename) # Determine the unlimited dimension if self._unlimited_dim not in ifile.dimensions: 
@@ -389,8 +378,7 @@ def _inspect_input_files(self): raise LookupError(err_msg) # Get the time values (list of NDArrays) - time_values.append( - ifile.variables[self._unlimited_dim].get_value()) + time_values.append(ifile.variables[self._unlimited_dim][:]) # Get the missing variables var_names_next = set(ifile.variables.keys()) @@ -406,10 +394,9 @@ def _inspect_input_files(self): # Make sure that the list of variables in each file is the same if len(missing_vars) != 0: - warning = ("WARNING: The first input file has variables that are " - "not in all input files:{0}{1}").format(linesep, ' ') - for var in missing_vars: - warning += ' {0}'.format(var) + warning = ("WARNING: The first input file has variables " + "that are not in all input files:{0} " + "{1}").format(linesep, ', '.join(sorted(missing_vars))) self._vprint(warning, header=True, verbosity=0) if self._simplecomm.is_manager(): @@ -471,6 +458,7 @@ def _inspect_output_files(self): we check whether the output files exist. By default, if the output file """ + iobackend.set_backend(self._backend) # Loop through the time-series variables and generate output filenames self._time_series_filenames = \ @@ -482,17 +470,18 @@ def _inspect_output_files(self): if isfile(f)] # Set the starting step index for each variable - self._time_series_step_index = \ - dict([(variable, 0) for variable in self._time_series_variables]) + self._time_series_step_index = dict([(variable, 0) for variable in + self._time_series_variables]) # If overwrite is enabled, delete all existing files first if self._write_mode == 'o': if self._simplecomm.is_manager() and len(self._existing) > 0: - self._vprint('WARNING: Deleting existing output files for ' - 'time-series variables: {0}'.format(self._existing), + self._vprint('WARNING: Deleting existing output files for time-series ' + 'variables: {0}'.format(', '.join(self._existing)), verbosity=0) for variable in self._existing: remove(self._time_series_filenames[variable]) + self._existing = [] # 
Or, if skip existing is set, remove the existing time-series # variables from the list of time-series variables to convert @@ -515,13 +504,12 @@ def _inspect_output_files(self): filename = self._time_series_filenames[variable] # Open the time-series file for inspection - tsfile = nio_open_file(filename, 'r') + tsfile = iobackend.NCFile(filename) # Check that the file has the unlimited dim and var if not tsfile.unlimited(self._unlimited_dim): - err_msg = ("Cannot append to time-series file with " - "missing unlimited dimension " - "'{0}'").format(self._unlimited_dim) + err_msg = ("Cannot append to time-series file with missing unlimited " + "dimension {0!r}").format(self._unlimited_dim) raise RuntimeError(err_msg) # Check for once file @@ -548,8 +536,7 @@ def _inspect_output_files(self): raise RuntimeError(err_msg) # Get the starting step index to start writing from - self._time_series_step_index[variable] = \ - tsfile.dimensions[self._unlimited_dim] + self._time_series_step_index[variable] = tsfile.dimensions[self._unlimited_dim] # Close the time-series file tsfile.close() @@ -560,7 +547,105 @@ def _inspect_output_files(self): "variables: {0}").format(self._existing) raise RuntimeError(err_msg) - def convert(self, output_limit=0): + def _create_var(self, in_file, out_file, vname): + in_var = in_file.variables[vname] + out_var = out_file.create_variable(vname, in_var.datatype, in_var.dimensions) + for att_name in in_var.ncattrs: + att_value = in_var.getncattr(att_name) + out_var.setncattr(att_name, att_value) + + def _chunk_iter(self, vobj, chunks={}, corder=True): + """ + This is a generator function to iterator over chunks of arrays with named dimensions + + Parameters: + vobj: A NetCDF file variable object with dimensions and shape attributes + chunks (dict): A dictionary of dimension names mapped to chunk sizes along that + named dimension + corder (bool): Whether to assume the array has C-style axis ordering, where the + fastest changing dimension is assumed to 
be the first axis. If False, then + the fastest changing dimension is assumed to be the last. + """ + dimensions = vobj.dimensions + shape = vobj.shape + + nchunks = 1 + dchunks = [] + for dname, dlen in zip(dimensions, shape): + if dname in chunks: + clen = chunks[dname] + cnum = dlen // clen + if dlen % clen > 0: + cnum += 1 + nchunks *= cnum + else: + clen = dlen + cnum = 1 + dchunks.append((dlen, clen, cnum)) + + for n in xrange(nchunks): + cidx = [] + nidx = n + nstride = nchunks + if corder: + diter = reversed(dchunks) + else: + diter = iter(dchunks) + for dlen, clen, cnum in diter: + nstride = nstride // cnum + cidx.append(nidx // nstride) + nidx = nidx % nstride + if corder: + cidx.reverse() + + cslice = [] + for d in xrange(len(shape)): + ic = cidx[d] + dlen, clen, cnum = dchunks[d] + + ibeg = ic * clen + iend = (ic + 1) * clen + if iend >= dlen: + iend = dlen + + cslice.append(slice(ibeg, iend)) + + yield tuple(cslice) + + def _offset_chunk(self, chunk, vobj, offset): + """ + Compute a new chunk/slice for a variable with a given offset + """ + new_chunk = [] + for i, d in enumerate(vobj.dimensions): + if d in offset: + o = offset[d] + else: + o = 0 + new_chunk.append(slice(chunk[i].start + o, chunk[i].stop + o)) + return tuple(new_chunk) + + def _copy_var(self, kind, in_var, out_var, chunks={}, offsets={}): + """ + Copy variable data from one variable object to another via chunking + """ + for rslice in self._chunk_iter(in_var, chunks=chunks): + + self._timer.start('Read {0}'.format(kind)) + tmp_data = in_var[rslice] + self._timer.stop('Read {0}'.format(kind)) + wslice = self._offset_chunk(rslice, out_var, offsets) + self._timer.start('Write {0}'.format(kind)) + out_var[wslice] = tmp_data + self._timer.stop('Write {0}'.format(kind)) + + requested_nbytes = tmp_data.nbytes if hasattr(tmp_data, 'nbytes') else 0 + self._byte_counts['Requested Data'] += requested_nbytes + actual_nbytes = (self.assumed_block_size * + numpy.ceil(requested_nbytes / 
self.assumed_block_size)) + self._byte_counts['Actual Data'] += actual_nbytes + + def convert(self, output_limit=0, chunks=None): """ Method to perform the Reshaper's designated operation. @@ -572,14 +657,17 @@ def convert(self, output_limit=0): to 0, no limit is placed. This limits the number of output files produced by each processor in a parallel run. + chunks (dict): A dictionary of dimension names mapped to chunk sizes + along that named dimension """ + iobackend.set_backend(self._backend) + # Type checking input if type(output_limit) is not int: err_msg = 'Output limit must be an integer' raise TypeError(err_msg) # Start the total convert process timer - self._simplecomm.sync() self._timer.start('Complete Conversion Process') # Validate the input files themselves @@ -600,9 +688,26 @@ def convert(self, output_limit=0): if self._simplecomm.is_manager(): self._vprint('...Output files inspected.', verbosity=0) + # Check the chunking + if chunks is None: + # Default chunking is over 1 time-step at a time + chunks = {self._unlimited_dim: 1} + if not isinstance(chunks, dict): + err_msg = 'Chunks must be specified with a dictionary' + raise TypeError(err_msg) + for key, value in chunks.iteritems(): + if not isinstance(key, basestring): + err_msg = 'Chunks dictionary must have string-type keys' + raise TypeError(err_msg) + if not isinstance(value, int): + err_msg = 'Chunks dictionary must have integer chunk sizes' + raise TypeError(err_msg) + # Debugging output if self._simplecomm.is_manager(): + self._vprint('Read chunk sizes: {0!s}'.format(chunks), verbosity=1) self._vprint('Converting time-slices to time-series...', verbosity=0) + self._simplecomm.sync() # Partition the time-series variables across all processors tsv_names_loc = self._time_series_variables @@ -633,11 +738,6 @@ def convert(self, output_limit=0): self._byte_counts['Requested Data'] = 0 self._byte_counts['Actual Data'] = 0 - # Defining a simple helper function to determine the bytes size of - # a 
variable given to it, whether an NDArray or not - def _get_bytesize(data): - return data.nbytes if hasattr(data, 'nbytes') else 0 - #===== LOOP OVER TIME_SERIES VARIABLES ===== # Loop over all time-series variables @@ -662,33 +762,33 @@ def _get_bytesize(data): remove(temp_filename) if self._write_mode == 'a' and out_name in self._existing: rename(out_filename, temp_filename) - out_file = nio_open_file(temp_filename, 'a', - options=self._nio_options) + out_file = iobackend.NCFile(temp_filename, 'a', self._netcdf_format, + self._netcdf_compression) appending = True else: - out_file = nio_open_file(temp_filename, 'w', - options=self._nio_options) + out_file = iobackend.NCFile(temp_filename, 'w', self._netcdf_format, + self._netcdf_compression) appending = False self._timer.stop('Open Output Files') - # Start the loop over input files (i.e., time-steps) - series_step_index = self._time_series_step_index[out_name] + # Start the loop over input files (i.e., time-slices) + offsets = {self._unlimited_dim: self._time_series_step_index[out_name]} for in_filename in self._input_filenames: # Open the input file self._timer.start('Open Input Files') - in_file = nio_open_file(in_filename, 'r') + in_file = iobackend.NCFile(in_filename) self._timer.stop('Open Input Files') # Create header info, if this is the first input file if in_filename == self._input_filenames[0] and not appending: # Copy file attributes and dimensions to output file - for name, val in in_file.attributes.iteritems(): - setattr(out_file, name, val) + for name in in_file.ncattrs: + out_file.setncattr(name, in_file.getncattr(name)) for name, val in in_file.dimensions.iteritems(): if name == self._unlimited_dim: - out_file.create_dimension(name, None) + out_file.create_dimension(name) else: out_file.create_dimension(name, val) @@ -698,21 +798,13 @@ def _get_bytesize(data): # Time-invariant metadata variables self._timer.start('Create Time-Invariant Metadata') for name in self._time_invariant_metadata: - in_var = 
in_file.variables[name] - out_var = out_file.create_variable( - name, in_var.typecode(), in_var.dimensions) - for att_name, att_val in in_var.attributes.iteritems(): - setattr(out_var, att_name, att_val) + self._create_var(in_file, out_file, name) self._timer.stop('Create Time-Invariant Metadata') # Time-variant metadata variables self._timer.start('Create Time-Variant Metadata') for name in self._time_variant_metadata: - in_var = in_file.variables[name] - out_var = out_file.create_variable( - name, in_var.typecode(), in_var.dimensions) - for att_name, att_val in in_var.attributes.iteritems(): - setattr(out_var, att_name, att_val) + self._create_var(in_file, out_file, name) self._timer.stop('Create Time-Variant Metadata') # Create the time-series variable @@ -720,11 +812,7 @@ def _get_bytesize(data): # Time-series variable self._timer.start('Create Time-Series Variables') - in_var = in_file.variables[out_name] - out_var = out_file.create_variable( - out_name, in_var.typecode(), in_var.dimensions) - for att_name, att_val in in_var.attributes.iteritems(): - setattr(out_var, att_name, att_val) + self._create_var(in_file, out_file, out_name) self._timer.stop('Create Time-Series Variables') dbg_msg = ('Writing output file for variable: ' @@ -735,83 +823,29 @@ def _get_bytesize(data): # Copy the time-invariant metadata if write_meta_data: - for name in self._time_invariant_metadata: in_var = in_file.variables[name] out_var = out_file.variables[name] - self._timer.start('Read Time-Invariant Metadata') - tmp_data = in_var.get_value() - self._timer.stop('Read Time-Invariant Metadata') - self._timer.start('Write Time-Invariant Metadata') - out_var.assign_value(tmp_data) - self._timer.stop('Write Time-Invariant Metadata') - - requested_nbytes = _get_bytesize(tmp_data) - self._byte_counts[ - 'Requested Data'] += requested_nbytes - actual_nbytes = self.assumed_block_size \ - * numpy.ceil(requested_nbytes / self.assumed_block_size) - self._byte_counts['Actual Data'] += 
actual_nbytes - - # Get the number of time steps in this slice file - num_steps = in_file.dimensions[self._unlimited_dim] - - # Explicitly loop over time steps (to control memory use) - for slice_step_index in xrange(num_steps): - - # Copy the time-varient metadata - if write_meta_data: - - for name in self._time_variant_metadata: - in_var = in_file.variables[name] - out_var = out_file.variables[name] - ndims = len(in_var.dimensions) - udidx = in_var.dimensions.index( - self._unlimited_dim) - in_slice = [slice(None)] * ndims - in_slice[udidx] = slice_step_index - out_slice = [slice(None)] * ndims - out_slice[udidx] = series_step_index - self._timer.start('Read Time-Variant Metadata') - tmp_data = in_var[tuple(in_slice)] - self._timer.stop('Read Time-Variant Metadata') - self._timer.start('Write Time-Variant Metadata') - out_var[tuple(out_slice)] = tmp_data - self._timer.stop('Write Time-Variant Metadata') - - requested_nbytes = _get_bytesize(tmp_data) - self._byte_counts[ - 'Requested Data'] += requested_nbytes - actual_nbytes = self.assumed_block_size \ - * numpy.ceil(requested_nbytes / self.assumed_block_size) - self._byte_counts['Actual Data'] += actual_nbytes - - # Copy the time-series variables - if write_tser_data: - - in_var = in_file.variables[out_name] - out_var = out_file.variables[out_name] - ndims = len(in_var.dimensions) - udidx = in_var.dimensions.index(self._unlimited_dim) - in_slice = [slice(None)] * ndims - in_slice[udidx] = slice_step_index - out_slice = [slice(None)] * ndims - out_slice[udidx] = series_step_index - self._timer.start('Read Time-Series Variables') - tmp_data = in_var[tuple(in_slice)] - self._timer.stop('Read Time-Series Variables') - self._timer.start('Write Time-Series Variables') - out_var[tuple(out_slice)] = tmp_data - self._timer.stop('Write Time-Series Variables') - - requested_nbytes = _get_bytesize(tmp_data) - self._byte_counts['Requested Data'] += requested_nbytes - actual_nbytes = self.assumed_block_size \ - * 
numpy.ceil(requested_nbytes / self.assumed_block_size) - self._byte_counts['Actual Data'] += actual_nbytes - - # Increment the time-series step index - series_step_index += 1 + self._copy_var('Time-Invariant Metadata', in_var, out_var, + chunks=chunks) + + # Copy the time-varient metadata + if write_meta_data: + for name in self._time_variant_metadata: + in_var = in_file.variables[name] + out_var = out_file.variables[name] + self._copy_var('Time-Variant Metadata', in_var, out_var, + chunks=chunks, offsets=offsets) + + # Copy the time-series variables + if write_tser_data: + in_var = in_file.variables[out_name] + out_var = out_file.variables[out_name] + self._copy_var('Time-Series Variables', in_var, out_var, + chunks=chunks, offsets=offsets) + + # Increment the time-series index offset + offsets[self._unlimited_dim] += in_file.dimensions[self._unlimited_dim] # Close the input file self._timer.start('Close Input Files') @@ -833,8 +867,7 @@ def _get_bytesize(data): # Information self._simplecomm.sync() if self._simplecomm.is_manager(): - self._vprint(('Finished converting time-slices ' - 'to time-series.'), verbosity=0) + self._vprint('Finished converting time-slices to time-series.', verbosity=0) # Finish clocking the entire convert procedure self._timer.stop('Complete Conversion Process') @@ -847,6 +880,8 @@ def print_diagnostics(self): # Get all totals and maxima my_times = self._timer.get_all_times() max_times = self._simplecomm.allreduce(my_times, op='max') + my_memory = {'Maximum Memory Use': _get_memory_usage_MB_()} + max_memory = self._simplecomm.allreduce(my_memory, op='max') my_bytes = self._byte_counts total_bytes = self._simplecomm.allreduce(my_bytes, op='sum') @@ -868,170 +903,8 @@ def print_diagnostics(self): if self._simplecomm.is_manager(): self._vprint(byte_count_str, verbosity=-1) + # Print maximum memory use in MB + memory_str = _pprint_dictionary('MEMORY USAGE (MB)', max_memory) + if self._simplecomm.is_manager(): + self._vprint(memory_str, 
verbosity=-1) -#============================================================================== -# MultiSpecReshaper Class -#============================================================================== -class MultiSpecReshaper(Reshaper): - - """ - Multiple Slice-to-Series Reshaper class - - This class is designed to deal with lists of multiple - Slice2SeriesSpecifiers at a time. Instead of being instantiated - (or initialized) with a single Slice2SeriesSpecifier, - it takes a dictionary of Slice2SeriesSpecifier objects. - """ - - def __init__(self, specifiers, serial=False, verbosity=1, wmode='w', - once=False, simplecomm=None): - """ - Constructor - - Parameters: - specifiers (dict): A dict of named Specifier instances, each - defining an input specification for this reshaper operation. - serial (bool): True or False, indicating whether the operation - should be performed in serial (True) or parallel - (False). The default is to assume parallel operation - (but serial will be chosen if the mpi4py cannot be - found when trying to initialize decomposition. - verbosity(int): Level of printed output (stdout). A value of 0 - means no output, and a higher value means more output. The - default value is 1. - wmode (str): The mode to use for writing output. Can be 'w' for - normal write operation, 's' to skip the output generation for - existing time-series files, 'o' to overwrite existing - time-series files, 'a' to append to existing time-series files. - once (bool): True or False, indicating whether the Reshaper should - write all metadata to a 'once' file (separately). - simplecomm (SimpleComm): A SimpleComm object to handle the parallel - communication, if necessary - """ - - # Check types - if not isinstance(specifiers, dict): - err_msg = "Input must be given in a dictionary of Specifiers" - raise TypeError(err_msg) - if type(serial) is not bool: - err_msg = "Serial indicator must be True or False." 
- raise TypeError(err_msg) - if type(verbosity) is not int: - err_msg = "Verbosity level must be an integer." - raise TypeError(err_msg) - if type(wmode) is not str: - err_msg = "Write mode flag must be a str." - raise TypeError(err_msg) - if type(once) is not bool: - err_msg = "Once-file indicator must be True or False." - raise TypeError(err_msg) - if simplecomm is not None: - if not isinstance(simplecomm, SimpleComm): - err_msg = "Simple communicator object is not a SimpleComm" - raise TypeError(err_msg) - if wmode not in ['w', 's', 'o', 'a']: - err_msg = "Write mode '{0}' not recognized".format(wmode) - raise ValueError(err_msg) - - # Whether to write to a once file - self._use_once_file = once - - # Output file write mode - self._write_mode = wmode - - # Store the list of specifiers - self._specifiers = specifiers - - # Store the serial specifier - self._serial = serial - - # Check for a SimpleComm, and if none create it - if simplecomm is None: - simplecomm = create_comm(serial=serial) - - # Pointer to its own messenger - self._simplecomm = simplecomm - - # Store the verbosity - self._verbosity = verbosity - - # Set the verbose printer - self._vprint = VPrinter(verbosity=verbosity) - - # Storage for timing data - self._times = {} - - # Orders for printing timing data - self._time_orders = {} - - # Storage for all byte counters - self._byte_counts = {} - - def convert(self, output_limit=0): - """ - Method to perform each Reshaper's designated operation. - - Loops through and creates each Reshaper, calls each Reshaper's - convert() method, and pulls the timing data out for each convert - operation. - - Parameters: - output_limit (int): Limit on the number of output (time-series) - files to write during the convert() operation. If set - to 0, no limit is placed. This limits the number - of output files produced by each processor in a - parallel run. 
- """ - # Type checking input - if type(output_limit) is not int: - err_msg = 'Output limit must be an integer' - raise TypeError(err_msg) - - # Loop over all specifiers - for spec_name in self._specifiers: - if self._simplecomm.is_manager(): - self._vprint('--- Converting Specifier: ' + - str(spec_name), verbosity=0) - - rshpr = create_reshaper(self._specifiers[spec_name], - serial=self._serial, - verbosity=self._verbosity, - wmode=self._write_mode, - once=self._use_once_file, - simplecomm=self._simplecomm) - rshpr.convert(output_limit=output_limit) - - this_times = rshpr._timer.get_all_times() - self._times[spec_name] = rshpr._simplecomm.allreduce( - this_times, op='max') - self._time_orders[spec_name] = rshpr._timer.get_names() - this_count = rshpr._byte_counts - self._byte_counts[spec_name] = rshpr._simplecomm.allreduce( - this_count, op='sum') - - if self._simplecomm.is_manager(): - self._vprint('--- Finished converting Specifier: ' + - str(spec_name) + linesep, verbosity=0) - self._simplecomm.sync() - - def print_diagnostics(self): - """ - Print out timing and I/O information collected up to this point - """ - # Loop through all timers - for name in self._specifiers: - if self._simplecomm.is_manager(): - self._vprint('Specifier: ' + str(name), verbosity=0) - - times = self._times[name] - o = self._time_orders[name] - times_str = _pprint_dictionary('TIMING DATA', times, order=o) - if self._simplecomm.is_manager(): - self._vprint(times_str, verbosity=0) - - counts = self._byte_counts[name] - for name in counts: - counts[name] = counts[name] / float(1024 * 1024) - counts_str = _pprint_dictionary('BYTE COUNTS (MB)', counts) - if self._simplecomm.is_manager(): - self._vprint(counts_str, verbosity=0) diff --git a/source/pyreshaper/specification.py b/source/pyreshaper/specification.py index 206a0aea..30f45676 100644 --- a/source/pyreshaper/specification.py +++ b/source/pyreshaper/specification.py @@ -6,7 +6,7 @@ operations for the PyReshaper are specified with 
derived dypes of the Specification class. -Copyright 2015, University Corporation for Atmospheric Research +Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.rst file for details """ @@ -50,7 +50,10 @@ def __init__(self, compression=0, prefix='tseries.', suffix='.nc', + timeseries=None, metadata=[], + meta1d=False, + backend='netCDF4', **kwargs): """ Initializes the internal data with optional arguments. @@ -72,9 +75,18 @@ def __init__(self, to all time-series output files suffix (str): String specifying the suffix common to all time-series output files + timeseries (list): List of variable names to extract + out from the input time-slices into their own + time-series files. If None, then all non-metadata + time-variant variables will be treated as time-series + variables. metadata (list): List of variable names specifying the variables that should be included in every time-series output file + meta1d (bool): True if 1D time-variant variables should + be treated as metadata variables, False otherwise. + backend (str): Which I/O backend to use ('Nio' for + PyNIO, 'netCDF4' for netCDF4-python) kwargs (dict): Optional arguments describing the Reshaper run """ @@ -96,10 +108,18 @@ def __init__(self, # prefix + variable_name + suffix) self.output_file_suffix = suffix - # List of time-variant variables that should be included in all - # output files. + # List of time-variant variables that should be given their own output file + self.time_series = timeseries + + # List of time-variant variables that should be included in all output files. 
self.time_variant_metadata = metadata + # Whether all 1D time-variant variables should be treated as metadata + self.assume_1d_time_variant_metadata = meta1d + + # Store the netCDF I/O backend name + self.io_backend = backend + # Optional arguments associated with the reshaper operation self.options = kwargs @@ -128,12 +148,12 @@ def validate_types(self): # Validate that each input file name is a string for ifile_name in self.input_file_list: - if not isinstance(ifile_name, str): + if not isinstance(ifile_name, basestring): err_msg = "Input file names must be given as strings" raise TypeError(err_msg) # Validate the netcdf format string - if not isinstance(self.netcdf_format, str): + if not isinstance(self.netcdf_format, basestring): err_msg = "NetCDF format must be given as a string" raise TypeError(err_msg) @@ -143,15 +163,25 @@ def validate_types(self): raise TypeError(err_msg) # Validate the output file prefix - if not isinstance(self.output_file_prefix, str): + if not isinstance(self.output_file_prefix, basestring): err_msg = "Output file prefix must be given as a string" raise TypeError(err_msg) # Validate the output file suffix - if not isinstance(self.output_file_suffix, str): + if not isinstance(self.output_file_suffix, basestring): err_msg = "Output file suffix must be given as a string" raise TypeError(err_msg) + # Validate the type of the time-series variable list + if self.time_series is not None: + if not isinstance(self.time_series, list): + err_msg = "Time-series variables must be a list or None" + raise TypeError(err_msg) + for var_name in self.time_series: + if not isinstance(var_name, basestring): + err_msg = "Time-series variable names must be given as strings" + raise TypeError(err_msg) + # Validate the type of the time-variant metadata list if not isinstance(self.time_variant_metadata, list): err_msg = "Time-variant metadata must be a list" @@ -159,11 +189,21 @@ def validate_types(self): # Validate the type of each time-variant metadata 
variable name for var_name in self.time_variant_metadata: - if not isinstance(var_name, str): - err_msg = "Time-variant metadata variable names must be " + \ - "given as strings" + if not isinstance(var_name, basestring): + err_msg = ("Time-variant metadata variable names must be " + "given as strings") raise TypeError(err_msg) + # Validate the type of assume_1d_time_variant_metadata + if not isinstance(self.assume_1d_time_variant_metadata, bool): + err_msg = "Flag to assume 1D time-variant metadata must be boolean" + raise TypeError(err_msg) + + # Validate the type of the backend + if not isinstance(self.io_backend, basestring): + err_msg = "I/O backend must be given as a string" + raise TypeError(err_msg) + def validate_values(self): """ Method to validate the values of the Specifier data. @@ -173,7 +213,7 @@ def validate_values(self): We impose the (somewhat arbitrary) rule that the Specifier should not validate values what require "cracking" open the input files themselves. Hence, we validate values that can - be checked without any PyNIO file I/O (including reading the + be checked without any NetCDF file I/O (including reading the header information). This method will correct some input if it is safe to do so. @@ -224,6 +264,8 @@ def validate_values(self): if (self.output_file_suffix[-3:] != '.nc'): self.output_file_suffix += '.nc' + # Backend validated when PyReshaper is run ONLY! 
+ def write(self, fname): """ Write the specifier to a file diff --git a/source/pyreshaper/test/iobackendTests.py b/source/pyreshaper/test/iobackendTests.py new file mode 100644 index 00000000..c7a7e325 --- /dev/null +++ b/source/pyreshaper/test/iobackendTests.py @@ -0,0 +1,1120 @@ +""" +Unit tests for the iobackend module + +Copyright 2016, University Corporation for Atmospheric Research +See the LICENSE.rst file for details +""" + +import unittest +import numpy as np +import numpy.testing as npt +import netCDF4 +import Nio + +from pyreshaper import iobackend +from os import linesep, remove +from os.path import exists + + +#=============================================================================== +# print_test_msg +#=============================================================================== +def print_test_msg(testname, indata=None, actual=None, expected=None): + msg = '{0}:{1}'.format(testname, linesep) + if indata is not None: + msg += ' - input: {0!r}{1}'.format(indata, linesep) + if actual is not None: + msg += ' - actual: {0!r}{1}'.format(actual, linesep) + if expected is not None: + msg += ' - expected: {0!r}{1}'.format(expected, linesep) + print msg + + +#=============================================================================== +# IOBackendReadTests +#=============================================================================== +class IOBackendReadTests(unittest.TestCase): + + """ + IOBackendReadTests Class + + This class defines all of the unit tests for the iobackend module. 
+ """ + + def setUp(self): + self.ncfrname = 'readtest.nc' + self.ncattrs = {'a1': 'attribute 1', + 'a2': 'attribute 2'} + self.ncdims = {'t': 10, 'x': 5, 'c': 14} + self.t = np.arange(0, self.ncdims['t'], dtype='d') + self.x = np.random.ranf(self.ncdims['x']).astype('d') + self.v = np.random.ranf(self.ncdims['t']*self.ncdims['x']).reshape(10,5).astype('f') + self.s = np.array([c for c in 'this is a stri']) + self.vattrs = {'long_name': 'variable', + 'units': 'meters'} + + ncfile = netCDF4.Dataset(self.ncfrname, 'w') + for a,v in self.ncattrs.iteritems(): + setattr(ncfile, a, v) + ncfile.createDimension('t') + ncfile.createDimension('x', self.ncdims['x']) + ncfile.createDimension('c', self.ncdims['c']) + t = ncfile.createVariable('t', 'd', ('t',)) + t[:] = self.t + x = ncfile.createVariable('x', 'd', ('x',)) + x[:] = self.x + v = ncfile.createVariable('v', 'f', ('t', 'x')) + for a,val in self.vattrs.iteritems(): + v.setncattr(a, val) + v[:,:] = self.v + s = ncfile.createVariable('s', 'S1', ('c',)) + s[:] = self.s + + ncfile.close() + + def tearDown(self): + if exists(self.ncfrname): + remove(self.ncfrname) + + def test_avail(self): + actual = iobackend._AVAILABLE_ + print_test_msg('_AVAIL_', actual=actual) + self.assertTrue('Nio' in iobackend._AVAILABLE_, + 'Nio importable but not available') + self.assertTrue('netCDF4' in iobackend._AVAILABLE_, + 'netCDF4 importable but not available') + + def test_set_backend_nio(self): + indata = 'Nio' + iobackend.set_backend(indata) + actual = iobackend._BACKEND_ + expected = indata + print_test_msg('set_backend()', indata, actual, expected) + self.assertEqual(iobackend._BACKEND_, indata, + 'PyNIO backend name not set') + + def test_set_backend_nc4(self): + indata = 'netCDF4' + iobackend.set_backend(indata) + actual = iobackend._BACKEND_ + expected = indata + print_test_msg('set_backend()', indata, actual, expected) + self.assertEqual(iobackend._BACKEND_, indata, + 'netCDF4 backend name not set') + + def 
test_set_backend_x(self): + indata = 'x' + actual = iobackend._BACKEND_ + print_test_msg('set_backend()', indata, actual) + self.assertRaises(KeyError, iobackend.set_backend, indata) + + def test_NCFile_init_mode_x(self): + expected = ValueError + print_test_msg('NCFile.__init__(mode=x)', expected=expected) + self.assertRaises(expected, iobackend.NCFile, self.ncfrname, + mode='x') + + def test_nio_NCFile_init_read(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = type(ncf) + expected = iobackend.NCFile + ncf.close() + print_test_msg('Nio: NCFile.__init__()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile not created with correct type') + + def test_nc4_NCFile_init_read(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = type(ncf) + expected = iobackend.NCFile + ncf.close() + print_test_msg('netCDF4: NCFile.__init__()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile not created with correct type') + + def test_cmp_NCFile_init_read(self): + iobackend.set_backend('Nio') + ncf_nio = iobackend.NCFile(self.ncfrname) + actual = type(ncf_nio) + ncf_nio.close() + iobackend.set_backend('netCDF4') + ncf_nc4 = iobackend.NCFile(self.ncfrname) + expected = type(ncf_nc4) + ncf_nc4.close() + print_test_msg('CMP: NCFile.__init__()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile not created with consisten types') + + def test_nio_NCFile_dimensions(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.dimensions + expected = self.ncdims + ncf.close() + print_test_msg('Nio: NCFile.dimensions', actual=actual, expected=expected) + self.assertEqual(len(actual), len(expected), + 'NCFile dimensions not correct length') + for dn, dv in expected.iteritems(): + self.assertTrue(dn in actual, + 'NCFile dimension {0!r} not present'.format(dn)) + self.assertEqual(actual[dn], dv, 
+ 'NCFile dimension {0!r} not correct'.format(dn)) + + def test_nc4_NCFile_dimensions(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.dimensions + expected = self.ncdims + ncf.close() + print_test_msg('netCDF4: NCFile.dimensions', actual=actual, expected=expected) + self.assertEqual(len(actual), len(expected), + 'NCFile dimensions not correct length') + for dn, dv in expected.iteritems(): + self.assertTrue(dn in actual, + 'NCFile dimension {0!r} not present'.format(dn)) + self.assertEqual(actual[dn], dv, + 'NCFile dimension {0!r} not correct'.format(dn)) + + def test_cmp_NCFile_dimensions(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.dimensions + ncf.close() + iobackend.set_backend('netCDF4') + ncf2 = iobackend.NCFile(self.ncfrname) + expected = ncf2.dimensions + ncf2.close() + print_test_msg('CMP: NCFile.dimensions', actual=actual, expected=expected) + self.assertEqual(len(actual), len(expected), + 'NCFile dimensions not consistent length') + for dn, dv in expected.iteritems(): + self.assertTrue(dn in actual, + 'NCFile dimension {0!r} not present'.format(dn)) + self.assertEqual(actual[dn], dv, + 'NCFile dimension {0!r} not correct'.format(dn)) + + def test_nio_NCFile_unlimited(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.unlimited('t') + expected = True + print_test_msg('Nio: NCFile.unlimited(t)', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile dimension t not unlimited') + actual = ncf.unlimited('x') + expected = False + print_test_msg('Nio: NCFile.unlimited(x)', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile dimension x not limited') + ncf.close() + + def test_nc4_NCFile_unlimited(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.unlimited('t') + expected = True + print_test_msg('netCDF4: 
NCFile.unlimited(t)', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile dimension t not unlimited') + actual = ncf.unlimited('x') + expected = False + print_test_msg('netCDF4: NCFile.unlimited(x)', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile dimension x not limited') + ncf.close() + + def test_cmp_NCFile_unlimited(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + t_unlimited_nio = ncf.unlimited('t') + x_unlimited_nio = ncf.unlimited('x') + ncf.close() + iobackend.set_backend('netCDF4') + ncf2 = iobackend.NCFile(self.ncfrname) + t_unlimited_nc4 = ncf2.unlimited('t') + x_unlimited_nc4 = ncf2.unlimited('x') + ncf2.close() + print_test_msg('CMP: NCFile.unlimited(t)', + actual=t_unlimited_nio, expected=t_unlimited_nc4) + self.assertEqual(t_unlimited_nio, t_unlimited_nc4, + 'NCFile dimension t unlimited results inconsistent') + print_test_msg('CMP: NCFile.unlimited(x)', + actual=x_unlimited_nio, expected=x_unlimited_nc4) + self.assertEqual(x_unlimited_nio, x_unlimited_nc4, + 'NCFile dimension x unlimited results inconsistent') + + def test_nio_NCFile_ncattrs(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.ncattrs + expected = self.ncattrs.keys() + ncf.close() + print_test_msg('Nio: NCFile.ncattrs', actual=actual, expected=expected) + self.assertEqual(len(actual), len(expected), + 'NCFile ncattrs not correct length') + for dn in expected: + self.assertTrue(dn in actual, + 'NCFile ncattrs {0!r} not present'.format(dn)) + self.assertEqual(ncf.getncattr(dn), self.ncattrs[dn], + 'NCFile ncattrs {0!r} not correct'.format(dn)) + + def test_nc4_NCFile_ncattrs(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.ncattrs + expected = self.ncattrs.keys() + print_test_msg('netCDF4: NCFile.ncattrs', + actual=actual, expected=expected) + self.assertEqual(len(actual), len(expected), + 'NCFile ncattrs 
not correct length') + for xname in expected: + self.assertTrue(xname in actual, + 'NCFile ncattrs {0!r} not present'.format(xname)) + xval = self.ncattrs[xname] + aval = ncf.getncattr(xname) + self.assertEqual(aval, xval, + 'NCFile ncattrs {0!r} not correct'.format(xname)) + ncf.close() + + def test_cmp_NCFile_ncattrs(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + nio_anames = ncf.ncattrs + nio_avalues = [ncf.getncattr(n) for n in nio_anames] + ncf.close() + iobackend.set_backend('netCDF4') + ncf2 = iobackend.NCFile(self.ncfrname) + nc4_anames = ncf2.ncattrs + nc4_avalues = [ncf2.getncattr(n) for n in nc4_anames] + ncf2.close() + print_test_msg('CMP: NCFile.ncattrs', + actual=zip(nio_anames, nio_avalues), + expected=zip(nc4_anames, nc4_avalues)) + self.assertEqual(len(nio_anames), len(nc4_anames), + 'NCFile ncattrs inconsistent lengths') + for aname, aval in zip(nio_anames, nio_avalues): + self.assertTrue(aname in nc4_anames, + 'NCFile ncattrs {0!r} not present'.format(aname)) + xval = nc4_avalues[nc4_anames.index(aname)] + self.assertEqual(aval, xval, + 'NCFile ncattrs {0!r} not correct'.format(aname)) + + def test_nio_NCFile_variables(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables + ncf.close() + print_test_msg('Nio: NCFile.variables', actual=actual) + self.assertEqual(len(actual), 4, + 'NCFile variables not correct length') + self.assertTrue('t' in actual, + 't variable not in NCFile') + self.assertTrue('x' in actual, + 'x variable not in NCFile') + self.assertTrue('v' in actual, + 'v variable not in NCFile') + for vn, vo in actual.iteritems(): + self.assertTrue(isinstance(vo, iobackend.NCVariable), + 'Variable {0!r} has wrong type'.format(vn)) + + def test_nc4_NCFile_variables(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables + ncf.close() + print_test_msg('netCDF4: NCFile.variables', actual=actual) + 
self.assertEqual(len(actual), 4, + 'NCFile variables not correct length') + self.assertTrue('t' in actual, + 't variable not in NCFile') + self.assertTrue('x' in actual, + 'x variable not in NCFile') + self.assertTrue('v' in actual, + 'v variable not in NCFile') + for vn, vo in actual.iteritems(): + self.assertTrue(isinstance(vo, iobackend.NCVariable), + 'Variable {0!r} has wrong type'.format(vn)) + + def test_cmp_NCFile_variables(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + variables_nio = ncf.variables + ncf.close() + iobackend.set_backend('netCDF4') + ncf2 = iobackend.NCFile(self.ncfrname) + variables_nc4 = ncf2.variables + ncf2.close() + print_test_msg('CMP: NCFile.variables', + actual=variables_nio, expected=variables_nc4) + self.assertEqual(len(variables_nio), len(variables_nc4), + 'NCFile variables inconsistent length') + self.assertTrue('t' in variables_nio, + 't variable not in Nio NCFile') + self.assertTrue('x' in variables_nio, + 'x variable not in Nio NCFile') + self.assertTrue('v' in variables_nio, + 'v variable not in Nio NCFile') + self.assertTrue('t' in variables_nc4, + 't variable not in netCDF4 NCFile') + self.assertTrue('x' in variables_nc4, + 'x variable not in netCDF4 NCFile') + self.assertTrue('v' in variables_nc4, + 'v variable not in netCDF4 NCFile') + + def test_nio_NCFile_close(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.close() + print_test_msg('Nio: NCFile.close', actual=actual) + + def test_nc4_NCFile_close(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.close() + print_test_msg('netCDF4: NCFile.close', actual=actual) + + def test_nio_NCVariable_ncattrs(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].ncattrs + expected = self.vattrs.keys() + print_test_msg('Nio: NCVariable.ncattrs', + actual=actual, expected=expected) + 
self.assertEqual(len(actual), len(expected), + 'NCVariable ncattrs not correct length') + for a in expected: + self.assertTrue(a in actual, + 'Attribute {0!r} not found in variable'.format(a)) + self.assertEqual(ncf.variables['v'].getncattr(a), self.vattrs[a], + 'Attribute {0!r} not correct'.format(a)) + ncf.close() + + def test_nc4_NCVariable_ncattrs(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].ncattrs + expected = self.vattrs.keys() + print_test_msg('netCDF4: NCVariable.ncattrs', + actual=actual, expected=expected) + self.assertEqual(len(actual), len(expected), + 'NCVariable ncattrs not correct length') + for a in expected: + self.assertTrue(a in actual, + 'Attribute {0!r} not found in variable'.format(a)) + self.assertEqual(ncf.variables['v'].getncattr(a), self.vattrs[a], + 'Attribute {0!r} not correct'.format(a)) + ncf.close() + + def test_cmp_NCVariable_ncattrs(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + v_anames_nio = ncf.variables['v'].ncattrs + v_avalues_nio = [ncf.variables['v'].getncattr(n) for n in v_anames_nio] + ncf.close() + iobackend.set_backend('netCDF4') + ncf2 = iobackend.NCFile(self.ncfrname) + v_anames_nc4 = ncf2.variables['v'].ncattrs + v_avalues_nc4 = [ncf2.variables['v'].getncattr(n) for n in v_anames_nc4] + ncf2.close() + print_test_msg('CMP: NCVariable.ncattrs', + actual=zip(v_anames_nio, v_avalues_nio), + expected=zip(v_anames_nc4, v_avalues_nc4)) + self.assertEqual(len(v_anames_nio), len(v_anames_nc4), + 'NCVariable ncattrs inconsistent length') + for a, v in zip(v_anames_nio, v_avalues_nio): + self.assertTrue(a in v_anames_nc4, + 'Attribute {0!r} not found in variable'.format(a)) + v2 = v_avalues_nc4[v_anames_nc4.index(a)] + self.assertEqual(v, v2, + 'Attribute {0!r} not correct'.format(a)) + + def test_nio_NCVariable_dimensions(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = 
ncf.variables['v'].dimensions + expected = ('t', 'x') + ncf.close() + print_test_msg('Nio: NCVariable.dimensions', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable dimensions not correct') + + def test_nc4_NCVariable_dimensions(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].dimensions + expected = ('t', 'x') + ncf.close() + print_test_msg('netCDF4: NCVariable.dimensions', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable dimensions not correct') + + def test_cmp_NCVariable_dimensions(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].dimensions + ncf.close() + iobackend.set_backend('netCDF4') + ncf2 = iobackend.NCFile(self.ncfrname) + expected = ncf2.variables['v'].dimensions + ncf2.close() + print_test_msg('CMP: NCVariable.dimensions', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable dimensions not correct') + + def test_nio_NCVariable_datatype(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].datatype + expected = 'f' + ncf.close() + print_test_msg('Nio: NCVariable.datatype', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable datatype not correct') + + def test_nc4_NCVariable_datatype(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].datatype + expected = 'f' + ncf.close() + print_test_msg('netCDF4: NCVariable.datatype', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable datatype not correct') + + def test_cmp_NCVariable_datatype(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].datatype + ncf.close() + iobackend.set_backend('netCDF4') + ncf2 = iobackend.NCFile(self.ncfrname) + expected 
= ncf2.variables['v'].datatype + ncf2.close() + print_test_msg('CMP: NCVariable.datatype', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable datatype not correct') + + def test_cmp_NCVariable_datatype_s(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['s'].datatype + ncf.close() + iobackend.set_backend('netCDF4') + ncf2 = iobackend.NCFile(self.ncfrname) + expected = ncf2.variables['s'].datatype + ncf2.close() + print_test_msg('CMP: NCVariable[string].datatype', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable string datatype not correct') + + def test_nio_NCVariable_shape(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].shape + expected = (self.ncdims['t'], self.ncdims['x']) + ncf.close() + print_test_msg('Nio: NCVariable.shape', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable shape not correct') + + def test_nc4_NCVariable_shape(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].shape + expected = (self.ncdims['t'], self.ncdims['x']) + ncf.close() + print_test_msg('netCDF4: NCVariable.shape', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable shape not correct') + + def test_cmp_NCVariable_shape(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].shape + ncf.close() + iobackend.set_backend('netCDF4') + ncf2 = iobackend.NCFile(self.ncfrname) + expected = ncf2.variables['v'].shape + ncf2.close() + print_test_msg('CMP: NCVariable.shape', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable shape not correct') + + def test_nio_NCVariable_size(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].size + 
expected = self.ncdims['t'] * self.ncdims['x'] + ncf.close() + print_test_msg('Nio: NCVariable.size', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable size not correct') + + def test_nc4_NCVariable_size(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].size + expected = self.ncdims['t'] * self.ncdims['x'] + ncf.close() + print_test_msg('netCDF4: NCVariable.size', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable size not correct') + + def test_cmp_NCVariable_size(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'].size + ncf.close() + iobackend.set_backend('netCDF4') + ncf2 = iobackend.NCFile(self.ncfrname) + expected = ncf2.variables['v'].size + ncf2.close() + print_test_msg('CMP: NCVariable.size', + actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCVariable size not correct') + + def test_nio_NCVariable_getitem(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'][:] + expected = self.v[:] + print_test_msg('NCVariable[v].__getitem__', + actual=actual, expected=expected) + npt.assert_array_equal(actual, expected) + actual = ncf.variables['t'][:] + expected = self.t[:] + print_test_msg('NCVariable[t].__getitem__', + actual=actual, expected=expected) + npt.assert_array_equal(actual, expected) + actual = ncf.variables['x'][:] + expected = self.x[:] + print_test_msg('NCVariable[x].__getitem__', + actual=actual, expected=expected) + npt.assert_array_equal(actual, expected) + ncf.close() + + + def test_nc4_NCVariable_getitem(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfrname) + actual = ncf.variables['v'][:] + expected = self.v[:] + print_test_msg('NCVariable[v].__getitem__', + actual=actual, expected=expected) + npt.assert_array_equal(actual, expected) + actual = 
ncf.variables['t'][:] + expected = self.t[:] + print_test_msg('NCVariable[t].__getitem__', + actual=actual, expected=expected) + npt.assert_array_equal(actual, expected) + actual = ncf.variables['x'][:] + expected = self.x[:] + print_test_msg('NCVariable[x].__getitem__', + actual=actual, expected=expected) + npt.assert_array_equal(actual, expected) + ncf.close() + + +#=============================================================================== +# IOBackendWriteTests +#=============================================================================== +class IOBackendWriteTests(unittest.TestCase): + + """ + IOBackendWriteTests Class + + This class defines all of the unit tests for the iobackend module. + """ + + def setUp(self): + self.ncfwname = 'writetest.nc' + self.ncattrs = {'a1': 'attribute 1', + 'a2': 'attribute 2'} + self.ncdims = {'t': 10, 'x': 5} + self.t = np.arange(0, self.ncdims['t'], dtype='d') + self.x = np.random.ranf(self.ncdims['x']).astype('d') + self.v = np.random.ranf(self.ncdims['t']*self.ncdims['x']).reshape(10,5).astype('f') + self.vattrs = {'long_name': 'variable', + 'units': 'meters'} + + def tearDown(self): + if exists(self.ncfwname): + remove(self.ncfwname) + + def test_nio_NCFile_init_write(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + actual = type(ncf) + ncf.close() + expected = iobackend.NCFile + print_test_msg('NCFile.__init__()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile not created with correct type') + + def test_nc4_NCFile_init_write(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + actual = type(ncf) + ncf.close() + expected = iobackend.NCFile + print_test_msg('NCFile.__init__()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile not created with correct type') + + def test_nio_NCFile_setncattr(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfwname, 
mode='w') + for a,v in self.ncattrs.iteritems(): + ncf.setncattr(a, v) + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.attributes + expected = self.ncattrs + ncfr.close() + print_test_msg('NCFile.setncattr()', actual=actual, expected=expected) + for a,v in expected.iteritems(): + self.assertTrue(a in actual, + 'NCFile attribute {0!r} not found'.format(a)) + self.assertEqual(actual[a], v, + 'NCFile attribute {0!r} incorrect'.format(a)) + + def test_nc4_NCFile_setncattr(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + for a,v in self.ncattrs.iteritems(): + ncf.setncattr(a, v) + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.attributes + expected = self.ncattrs + ncfr.close() + print_test_msg('NCFile.setncattr()', actual=actual, expected=expected) + for a,v in expected.iteritems(): + self.assertTrue(a in actual, + 'NCFile attribute {0!r} not found'.format(a)) + self.assertEqual(actual[a], v, + 'NCFile attribute {0!r} incorrect'.format(a)) + + def test_nio_NCFile_create_dimension(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('x', self.ncdims['x']) + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.dimensions['x'] + expected = self.ncdims['x'] + ncfr.close() + print_test_msg('NCFile.create_dimension()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile x-dimension incorrect') + + def test_nc4_NCFile_create_dimension(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('x', self.ncdims['x']) + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.dimensions['x'] + expected = self.ncdims['x'] + ncfr.close() + print_test_msg('NCFile.create_dimension()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile x-dimension incorrect') + + def 
test_nio_NCFile_create_dimension_unlimited(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('t') + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.dimensions['t'] + expected = 0 + ncfr.close() + print_test_msg('NCFile.create_dimension()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile t-dimension incorrect') + + def test_nc4_NCFile_create_dimension_unlimited(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('t') + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.dimensions['t'] + expected = 0 + ncfr.close() + print_test_msg('NCFile.create_dimension()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile t-dimension incorrect') + + def test_nio_NCFile_create_variable(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('x', self.ncdims['x']) + x = ncf.create_variable('x', np.dtype('d'), ('x',)) + x[:] = self.x + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.variables['x'][:] + expected = self.x + ncfr.close() + print_test_msg('NCFile.create_variable()', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile x-variable incorrect') + + def test_nc4_NCFile_create_variable(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('x', self.ncdims['x']) + x = ncf.create_variable('x', np.dtype('d'), ('x',)) + x[:] = self.x + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.variables['x'][:] + expected = self.x + ncfr.close() + print_test_msg('NCFile.create_variable()', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile x-variable incorrect') + + def test_nio_NCFile_create_variable_unlimited(self): + iobackend.set_backend('Nio') + ncf = 
iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('t') + t = ncf.create_variable('t', np.dtype('d'), ('t',)) + t[:] = self.t + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.variables['t'][:] + expected = self.t + ncfr.close() + print_test_msg('NCFile.create_variable()', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile t-variable incorrect') + + def test_nc4_NCFile_create_variable_unlimited(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('t') + t = ncf.create_variable('t', np.dtype('d'), ('t',)) + t[:] = self.t + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.variables['t'][:] + expected = self.t + ncfr.close() + print_test_msg('NCFile.create_variable()', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile t-variable incorrect') + + def test_nio_NCFile_create_variable_ndim(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('t') + ncf.create_dimension('x', self.ncdims['x']) + v = ncf.create_variable('v', np.dtype('f'), ('t', 'x')) + v[:] = self.v + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.variables['v'][:] + expected = self.v + ncfr.close() + print_test_msg('NCFile.create_variable()', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile 2d-variable incorrect') + + def test_nc4_NCFile_create_variable_ndim(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('t') + ncf.create_dimension('x', self.ncdims['x']) + v = ncf.create_variable('v', np.dtype('f'), ('t', 'x')) + v[:] = self.v + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.variables['v'][:] + expected = self.v + ncfr.close() + print_test_msg('NCFile.create_variable()', actual=actual, expected=expected) + 
npt.assert_array_equal(actual, expected, + 'NCFile 2d-variable incorrect') + + def test_nio_NCVariable_setncattr(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('t') + ncf.create_dimension('x', self.ncdims['x']) + v = ncf.create_variable('v', np.dtype('f'), ('t', 'x')) + for attr,value in self.vattrs.iteritems(): + v.setncattr(attr, value) + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.variables['v'].attributes + expected = self.vattrs + ncfr.close() + print_test_msg('NCVariable.setncattr()', actual=actual, expected=expected) + for attr, value in expected.iteritems(): + self.assertTrue(attr in actual, + 'Variable attribute {0!r} not found'.format(attr)) + self.assertEqual(actual[attr], value, + 'Variable attribute {0!r} incorrect'.format(attr)) + + def test_nc4_NCVariable_setncattr(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfwname, mode='w') + ncf.create_dimension('t') + ncf.create_dimension('x', self.ncdims['x']) + v = ncf.create_variable('v', np.dtype('f'), ('t', 'x')) + for attr,value in self.vattrs.iteritems(): + v.setncattr(attr, value) + ncf.close() + ncfr = Nio.open_file(self.ncfwname) + actual = ncfr.variables['v'].attributes + expected = self.vattrs + ncfr.close() + print_test_msg('NCVariable.setncattr()', actual=actual, expected=expected) + for attr, value in expected.iteritems(): + self.assertTrue(attr in actual, + 'Variable attribute {0!r} not found'.format(attr)) + self.assertEqual(actual[attr], value, + 'Variable attribute {0!r} incorrect'.format(attr)) + + +#=============================================================================== +# IOBackendAppendTests +#=============================================================================== +class IOBackendAppendTests(unittest.TestCase): + + """ + IOBackendAppendTests Class + + This class defines all of the unit tests for the iobackend module. 
+ """ + + def setUp(self): + self.ncfaname = 'appendtest.nc' + self.ncattrs = {'a1': 'attribute 1', + 'a2': 'attribute 2'} + self.ncdims = {'t': 10, 'x': 5} + self.t = np.arange(self.ncdims['t'], dtype='d') + self.x = np.arange(self.ncdims['x'], dtype='d') + self.v = np.arange(self.ncdims['t']*self.ncdims['x'], + dtype='f').reshape(self.ncdims['t'], self.ncdims['x']) + self.vattrs = {'long_name': 'variable', + 'units': 'meters'} + + self.fattrs2 = {'a3': 'attribute 3', + 'a4': 'attribute 4'} + self.t2 = np.arange(self.ncdims['t'], 2*self.ncdims['t'], dtype='d') + self.v2 = np.arange(self.ncdims['t']*self.ncdims['x'], + dtype='f').reshape(self.ncdims['t'], self.ncdims['x']) + self.vattrs2 = {'standard_name': 'variable'} + + ncfile = netCDF4.Dataset(self.ncfaname, 'w') + for a,v in self.ncattrs.iteritems(): + setattr(ncfile, a, v) + ncfile.createDimension('t') + ncfile.createDimension('x', self.ncdims['x']) + t = ncfile.createVariable('t', 'd', ('t',)) + t[:] = self.t + x = ncfile.createVariable('x', 'd', ('x',)) + x[:] = self.x + v = ncfile.createVariable('v', 'f', ('t', 'x')) + for a,val in self.vattrs.iteritems(): + v.setncattr(a, val) + v[:,:] = self.v + + ncfile.close() + + def tearDown(self): + if exists(self.ncfaname): + remove(self.ncfaname) + + def test_nio_NCFile_init_append(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfaname, mode='a') + actual = type(ncf) + ncf.close() + expected = iobackend.NCFile + print_test_msg('NCFile.__init__()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile not created with correct type') + + def test_nc4_NCFile_init_append(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfaname, mode='a') + actual = type(ncf) + ncf.close() + expected = iobackend.NCFile + print_test_msg('NCFile.__init__()', actual=actual, expected=expected) + self.assertEqual(actual, expected, + 'NCFile not created with correct type') + + def test_nio_NCFile_setncattr(self): + 
iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfaname, mode='a') + for a,v in self.fattrs2.iteritems(): + ncf.setncattr(a, v) + ncf.close() + ncfr = Nio.open_file(self.ncfaname) + actual = ncfr.attributes + expected = self.ncattrs + expected.update(self.fattrs2) + ncfr.close() + print_test_msg('NCFile.setncattr()', actual=actual, expected=expected) + for a,v in expected.iteritems(): + self.assertTrue(a in actual, + 'NCFile attribute {0!r} not found'.format(a)) + self.assertEqual(actual[a], v, + 'NCFile attribute {0!r} incorrect'.format(a)) + + def test_nc4_NCFile_setncattr(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfaname, mode='a') + for a,v in self.fattrs2.iteritems(): + ncf.setncattr(a, v) + ncf.close() + ncfr = Nio.open_file(self.ncfaname) + actual = ncfr.attributes + expected = self.ncattrs + expected.update(self.fattrs2) + ncfr.close() + print_test_msg('NCFile.setncattr()', actual=actual, expected=expected) + for a,v in expected.iteritems(): + self.assertTrue(a in actual, + 'NCFile attribute {0!r} not found'.format(a)) + self.assertEqual(actual[a], v, + 'NCFile attribute {0!r} incorrect'.format(a)) + + def test_nio_NCFile_create_variable_ndim(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfaname, mode='a') + v2 = ncf.create_variable('v2', np.dtype('f'), ('t', 'x')) + v2[:] = self.v2 + ncf.close() + ncfr = Nio.open_file(self.ncfaname) + actual = ncfr.variables['v2'][:] + expected = self.v2 + ncfr.close() + print_test_msg('NCFile.create_variable()', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile 2d-variable incorrect') + + def test_nc4_NCFile_create_variable_ndim(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfaname, mode='a') + v2 = ncf.create_variable('v2', np.dtype('f'), ('t', 'x')) + v2[:] = self.v2 + ncf.close() + ncfr = Nio.open_file(self.ncfaname) + actual = ncfr.variables['v2'][:] + expected = self.v2 + ncfr.close() + 
print_test_msg('NCFile.create_variable()', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile 2d-variable incorrect') + + def test_nio_NCFile_variable_append(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfaname, mode='a') + nt = self.ncdims['t'] + t = ncf.variables['t'] + t[nt:] = self.t2 + v = ncf.variables['v'] + v[nt:, :] = self.v2 + ncf.close() + ncfr = Nio.open_file(self.ncfaname) + actual = ncfr.variables['t'][:] + expected = np.concatenate((self.t, self.t2)) + print_test_msg('NCVariable append', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile t-variable incorrect') + actual = ncfr.variables['v'][:] + expected = np.concatenate((self.v, self.v2)) + print_test_msg('NCVariable append', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile 2d-variable incorrect') + ncfr.close() + + def test_nc4_NCFile_variable_append(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfaname, mode='a') + nt = self.ncdims['t'] + t = ncf.variables['t'] + t[nt:] = self.t2 + v = ncf.variables['v'] + v[nt:, :] = self.v2 + ncf.close() + ncfr = Nio.open_file(self.ncfaname) + actual = ncfr.variables['t'][:] + expected = np.concatenate((self.t, self.t2)) + print_test_msg('NCVariable append', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile t-variable incorrect') + actual = ncfr.variables['v'][:] + expected = np.concatenate((self.v, self.v2)) + print_test_msg('NCVariable append', actual=actual, expected=expected) + npt.assert_array_equal(actual, expected, + 'NCFile 2d-variable incorrect') + ncfr.close() + + def test_nio_NCVariable_setncattr(self): + iobackend.set_backend('Nio') + ncf = iobackend.NCFile(self.ncfaname, mode='a') + v = ncf.variables['v'] + for attr,value in self.vattrs2.iteritems(): + v.setncattr(attr, value) + ncf.close() + ncfr = Nio.open_file(self.ncfaname) + actual = 
ncfr.variables['v'].attributes + expected = self.vattrs + expected.update(self.vattrs2) + ncfr.close() + print_test_msg('NCVariable.setncattr()', actual=actual, expected=expected) + for attr, value in expected.iteritems(): + self.assertTrue(attr in actual, + 'Variable attribute {0!r} not found'.format(attr)) + self.assertEqual(actual[attr], value, + 'Variable attribute {0!r} incorrect'.format(attr)) + + def test_nc4_NCVariable_setncattr(self): + iobackend.set_backend('netCDF4') + ncf = iobackend.NCFile(self.ncfaname, mode='a') + v = ncf.variables['v'] + for attr,value in self.vattrs2.iteritems(): + v.setncattr(attr, value) + ncf.close() + ncfr = Nio.open_file(self.ncfaname) + actual = ncfr.variables['v'].attributes + expected = self.vattrs + expected.update(self.vattrs2) + ncfr.close() + print_test_msg('NCVariable.setncattr()', actual=actual, expected=expected) + for attr, value in expected.iteritems(): + self.assertTrue(attr in actual, + 'Variable attribute {0!r} not found'.format(attr)) + self.assertEqual(actual[attr], value, + 'Variable attribute {0!r} incorrect'.format(attr)) + + +#=============================================================================== +# CLI +#=============================================================================== +if __name__ == "__main__": + # import sys;sys.argv = ['', 'Test.testName'] + unittest.main() diff --git a/source/pyreshaper/test/mkTestData.py b/source/pyreshaper/test/mkTestData.py index 4a31ecd3..1f9a4552 100644 --- a/source/pyreshaper/test/mkTestData.py +++ b/source/pyreshaper/test/mkTestData.py @@ -1,5 +1,5 @@ """ -Copyright 2015, University Corporation for Atmospheric Research +Copyright 2016, University Corporation for Atmospheric Research See LICENSE.txt for details """ @@ -108,6 +108,9 @@ def _assert(key, value): return assertions ncout = Nio.open_file(outfile, 'r') + if 'meta1d' in kwds and kwds['meta1d'] is True: + metadata.append('time') + series_step = 0 for infile in infiles: _assert('{0!r} 
exists'.format(infile), os.path.exists(infile)) diff --git a/source/pyreshaper/test/s2sReshaperTests.py b/source/pyreshaper/test/reshaperTests.py similarity index 74% rename from source/pyreshaper/test/s2sReshaperTests.py rename to source/pyreshaper/test/reshaperTests.py index 851b50d9..2af5bc09 100644 --- a/source/pyreshaper/test/s2sReshaperTests.py +++ b/source/pyreshaper/test/reshaperTests.py @@ -1,7 +1,7 @@ """ Parallel Tests for the Reshaper class -Copyright 2015, University Corporation for Atmospheric Research +Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.rst file for details """ @@ -12,17 +12,18 @@ from cStringIO import StringIO from os import linesep as eol from os import remove +from os.path import exists from mpi4py import MPI -from pyreshaper.reshaper import Slice2SeriesReshaper, create_reshaper +from pyreshaper.reshaper import Reshaper, create_reshaper from pyreshaper.specification import Specifier import mkTestData -MPI_COMM_WORLD = MPI.COMM_WORLD +MPI_COMM_WORLD = MPI.COMM_WORLD # @UndefinedVariable -class S2SReshaperTests(unittest.TestCase): +class ReshaperTests(unittest.TestCase): def setUp(self): @@ -54,14 +55,17 @@ def _test_header(self, testname): def _convert_header(self, infiles, prefix, suffix, metadata, ncfmt, clevel, serial, verbosity, wmode, once, - print_diags=False): + print_diags=False, meta1d=False, tseries=None): nfiles = len(infiles) - ncvers = '3' if ncfmt == 'netcdf' else ('4c' if ncfmt == 'netcdf4c' - else '4') - self._test_header(("convert() - {0} infile(s), NC{1}-CL{2}, serial={3},{4}" - " verbosity={5}, wmode={6!r}, once={7}" + ncvers = '3' if ncfmt == 'netcdf' else ('4c' if ncfmt == 'netcdf4c' else '4') + if tseries is not None: + tsvstr = ', numtsv={0}'.format(len(tseries)) + else: + tsvstr = '' + self._test_header(("convert() - {0} infile(s), NC{1}-CL{2}, serial={3}, wmode={6!r},{4}" + " verbosity={5}, once={7}, meta1d={8}{9}" "").format(nfiles, ncvers, clevel, serial, eol, - verbosity, 
wmode, once)) + verbosity, wmode, once, meta1d, tsvstr)) def _assertion(self, name, actual, expected, data=None, show=True, assertion=None): @@ -79,14 +83,21 @@ def _assertion(self, name, actual, expected, else: self.assertEqual(actual, expected, msg) + def _check_outfile(self, tsvar, **args): + assertions_dict = mkTestData.check_outfile(tsvar=tsvar, **args) + failed_assertions = [key for key, value in assertions_dict.iteritems() if value is False] + assert_msgs = ['Output file check for variable {0!r}:'.format(tsvar)] + assert_msgs.extend([' {0}'.format(assrt) for assrt in failed_assertions]) + self.assertEqual(len(failed_assertions), 0, eol.join(assert_msgs)) + def _run_convert(self, infiles, prefix, suffix, metadata, ncfmt, clevel, serial, verbosity, wmode, once, - print_diags=False): + print_diags=False, meta1d=False, tseries=None): if not (serial and self.rank > 0): spec = Specifier(infiles=infiles, ncfmt=ncfmt, compression=clevel, - prefix=prefix, suffix=suffix, metadata=metadata) - rshpr = create_reshaper(spec, serial=serial, - verbosity=verbosity, + prefix=prefix, suffix=suffix, metadata=metadata, + meta1d=meta1d, timeseries=tseries) + rshpr = create_reshaper(spec, serial=serial, verbosity=verbosity, wmode=wmode, once=once) rshpr.convert() if print_diags: @@ -95,12 +106,13 @@ def _run_convert(self, infiles, prefix, suffix, metadata, def _run_convert_assert_no_output(self, infiles, prefix, suffix, metadata, ncfmt, clevel, serial, verbosity, wmode, - once, print_diags=False): + once, print_diags=False, meta1d=False, tseries=None): oldout = sys.stdout newout = StringIO() sys.stdout = newout self._run_convert(infiles, prefix, suffix, metadata, ncfmt, clevel, - serial, 0, wmode, once, print_diags=False) + serial, 0, wmode, once, print_diags=False, + meta1d=meta1d, tseries=tseries) actual = newout.getvalue() self._assertion("stdout empty", actual, '') sys.stdout = oldout @@ -115,7 +127,7 @@ def _test_create_reshaper(self, serial, verbosity, wmode): rshpr = 
create_reshaper(spec, serial=serial, verbosity=verbosity, wmode=wmode) self._assertion("type(reshaper)", type(rshpr), - Slice2SeriesReshaper) + Reshaper) def test_create_reshaper_serial_V0_W(self): self._test_create_reshaper(serial=True, verbosity=0, wmode='w') @@ -149,7 +161,39 @@ def test_convert_All_NC3_CL0_SER_V0_W(self): self._run_convert_assert_no_output(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) + MPI_COMM_WORLD.Barrier() + + def test_convert_All_NC3_CL0_SER_V0_W_M1D(self): + mdata = [v for v in mkTestData.tvmvars] + args = {'infiles': mkTestData.slices, 'prefix': 'out.', 'suffix': '.nc', + 'metadata': mdata, 'ncfmt': 'netcdf', 'clevel': 0, + 'serial': True, 'verbosity': 0, 'wmode': 'w', 'once': False, + 'print_diags': False, 'meta1d': True} + self._convert_header(**args) + self._run_convert_assert_no_output(**args) + if self.rank == 0: + for tsvar in mkTestData.tsvars: + self._check_outfile(tsvar=tsvar, **args) + MPI_COMM_WORLD.Barrier() + + def test_convert_All_NC3_CL0_SER_V1_W_TSER(self): + tsers = [mkTestData.tsvars[1], 'tsvarX'] + mdata = [v for v in mkTestData.tvmvars] + mdata.append('time') + args = {'infiles': mkTestData.slices, 'prefix': 'out.', 'suffix': '.nc', + 'metadata': mdata, 'ncfmt': 'netcdf', 'clevel': 0, + 'serial': True, 'verbosity': 1, 'wmode': 'w', 'once': False, + 'print_diags': False, 'tseries': tsers} + self._convert_header(**args) + self._run_convert(**args) + if self.rank == 0: + for tsvar in mkTestData.tsvars: + if tsvar in tsers: + self._check_outfile(tsvar=tsvar, **args) + else: + fname = args['prefix'] + tsvar + args['suffix'] + self.assertFalse(exists(fname), 'File {0!r} should not exist'.format(fname)) MPI_COMM_WORLD.Barrier() def test_convert_1_NC3_CL0_SER_V0_W(self): @@ -163,7 +207,7 @@ def test_convert_1_NC3_CL0_SER_V0_W(self): self._run_convert_assert_no_output(**args) if self.rank == 0: for tsvar in 
mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_convert_All_NC4_CL1_SER_V0_W(self): @@ -177,7 +221,7 @@ def test_convert_All_NC4_CL1_SER_V0_W(self): self._run_convert_assert_no_output(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_convert_All_NC3_CL0_PAR_V1_W(self): @@ -191,7 +235,7 @@ def test_convert_All_NC3_CL0_PAR_V1_W(self): self._run_convert(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_convert_All_NC3_CL0_PAR_V1_W_ONCE(self): @@ -204,9 +248,9 @@ def test_convert_All_NC3_CL0_PAR_V1_W_ONCE(self): self._convert_header(**args) self._run_convert(**args) if self.rank == 0: - mkTestData.check_outfile(tsvar='once', **args) + self._check_outfile(tsvar='once', **args) for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_convert_All_NC3_CL0_PAR_V1_O(self): @@ -220,7 +264,7 @@ def test_convert_All_NC3_CL0_PAR_V1_O(self): self._run_convert(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_convert_All_NC3_CL0_PAR_V1_O_ONCE(self): @@ -233,9 +277,9 @@ def test_convert_All_NC3_CL0_PAR_V1_O_ONCE(self): self._convert_header(**args) self._run_convert(**args) if self.rank == 0: - mkTestData.check_outfile(tsvar='once', **args) + self._check_outfile(tsvar='once', **args) for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_convert_All_NC3_CL0_PAR_V1_S(self): @@ -249,7 +293,7 @@ def 
test_convert_All_NC3_CL0_PAR_V1_S(self): self._run_convert(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_convert_All_NC3_CL0_PAR_V1_S_ONCE(self): @@ -262,9 +306,9 @@ def test_convert_All_NC3_CL0_PAR_V1_S_ONCE(self): self._convert_header(**args) self._run_convert(**args) if self.rank == 0: - mkTestData.check_outfile(tsvar='once', **args) + self._check_outfile(tsvar='once', **args) for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_convert_All_NC3_CL0_PAR_V3_A(self): @@ -280,7 +324,7 @@ def test_convert_All_NC3_CL0_PAR_V3_A(self): self._run_convert(infiles=[infile], wmode='a', **args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) + self._check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_convert_All_NC3_CL0_PAR_V3_A_ONCE(self): @@ -295,9 +339,9 @@ def test_convert_All_NC3_CL0_PAR_V3_A_ONCE(self): for infile in mkTestData.slices[1:]: self._run_convert(infiles=[infile], wmode='a', **args) if self.rank == 0: - mkTestData.check_outfile(infiles=mkTestData.slices, tsvar='once', **args) + self._check_outfile(infiles=mkTestData.slices, tsvar='once', **args) for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) + self._check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_convert_All_NC3_CL0_PAR_V3_A_MISSING(self): @@ -317,9 +361,9 @@ def test_convert_All_NC3_CL0_PAR_V3_A_MISSING(self): if self.rank == 0: for tsvar in mkTestData.tsvars: if tsvar == missingvar: - mkTestData.check_outfile(infiles=mkTestData.slices[2:], tsvar=tsvar, **args) + self._check_outfile(infiles=mkTestData.slices[2:], tsvar=tsvar, **args) else: - 
mkTestData.check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) + self._check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() @@ -332,7 +376,7 @@ def test_convert_All_NC3_CL0_PAR_V3_A_MISSING(self): MPI_COMM_WORLD.Barrier() mystream = StringIO() - tests = unittest.TestLoader().loadTestsFromTestCase(S2SReshaperTests) + tests = unittest.TestLoader().loadTestsFromTestCase(ReshaperTests) unittest.TextTestRunner(stream=mystream).run(tests) MPI_COMM_WORLD.Barrier() diff --git a/source/pyreshaper/test/s2smakeTests.py b/source/pyreshaper/test/s2smakeTests.py index 0d8a6a2a..39dc307f 100644 --- a/source/pyreshaper/test/s2smakeTests.py +++ b/source/pyreshaper/test/s2smakeTests.py @@ -1,5 +1,5 @@ """ -Copyright 2015, University Corporation for Atmospheric Research +Copyright 2016, University Corporation for Atmospheric Research See LICENSE.txt for details """ @@ -39,8 +39,12 @@ def test_CLI_defaults(self): 'Default output prefix is not "tseries."') self.assertEqual(opts.output_suffix, '.nc', 'Default output suffix is not ".nc"') + self.assertEqual(opts.time_series, None, + 'Default time series names is not None') self.assertEqual(opts.specfile, 'input.s2s', 'Default output suffix is not ".nc"') + self.assertEqual(opts.meta1d, False, + 'Default 1D metadata flag is not False') def test_CLI_set_all_short(self): clevel = 3 @@ -51,13 +55,13 @@ def test_CLI_set_all_short(self): suffix = '.suffix' infiles = ['s2smakeTests.py', 'specificationTests.py'] - argv = ['-c', str(clevel), '-f', ncfmt] + argv = ['-1'] + argv.extend(['-c', str(clevel), '-f', ncfmt]) for md in metadata: argv.extend(['-m', md]) argv.extend(['-o', specfile, '-p', prefix, '-s', suffix]) argv.extend(infiles) opts, args = s2smake.cli(argv) - print opts.metadata self.assertEqual(opts.compression_level, clevel, 'Default compression level is not {0!r}'.format(clevel)) @@ -77,8 +81,12 @@ def test_CLI_set_all_short(self): 'Default output prefix is not {0!r}'.format(prefix)) 
self.assertEqual(opts.output_suffix, suffix, 'Default output suffix is not {0!r}'.format(suffix)) + self.assertEqual(opts.time_series, None, + 'Default time-series list is not None') self.assertEqual(opts.specfile, specfile, 'Default output suffix is not {0!r}'.format(specfile)) + self.assertEqual(opts.meta1d, True, + 'Default 1D metadata flag is not False') def test_CLI_set_all_long(self): clevel = 3 @@ -87,13 +95,17 @@ def test_CLI_set_all_long(self): specfile = 'myspec.s2s' prefix = 'prefix.' suffix = '.suffix' + tseries = ['tsvar1', 'tsvar2'] infiles = ['s2smakeTests.py', 'specificationTests.py'] - argv = ['--compression_level', str(clevel), '--netcdf_format', ncfmt] + argv = ['--meta1d'] + argv.extend(['--compression_level', str(clevel), '--netcdf_format', ncfmt]) for md in metadata: argv.extend(['--metadata', md]) argv.extend(['--specfile', specfile, '--output_prefix', prefix, '--output_suffix', suffix]) + for ts in tseries: + argv.extend(['--time_series', ts]) argv.extend(infiles) opts, args = s2smake.cli(argv) @@ -113,8 +125,13 @@ def test_CLI_set_all_long(self): 'Default output prefix is not {0!r}'.format(prefix)) self.assertEqual(opts.output_suffix, suffix, 'Default output suffix is not {0!r}'.format(suffix)) + for i1, i2 in zip(opts.time_series, tseries): + self.assertEqual(i1, i2, + 'Default time-series list is not {0}'.format(tseries)) self.assertEqual(opts.specfile, specfile, 'Default output suffix is not {0!r}'.format(specfile)) + self.assertEqual(opts.meta1d, True, + 'Default 1D metadata flag is not False') def test_main_defaults(self): argv = ['s2smakeTests.py'] @@ -142,6 +159,10 @@ def test_main_defaults(self): 'Default output prefix is not "tseries."') self.assertEqual(spec.output_file_suffix, '.nc', 'Default output suffix is not ".nc"') + self.assertEqual(spec.time_series, None, + 'Default NetCDF format is not None') + self.assertEqual(spec.assume_1d_time_variant_metadata, False, + 'Default 1D time-variant metadata flag is not False') def 
test_main_set_all_short(self): clevel = 3 @@ -152,7 +173,7 @@ def test_main_set_all_short(self): suffix = '.suffix' infiles = ['s2smakeTests.py', 'specificationTests.py'] - argv = ['-c', str(clevel), '-f', ncfmt] + argv = ['-1', '-c', str(clevel), '-f', ncfmt] for md in metadata: argv.extend(['-m', md]) argv.extend(['-o', specfile, '-p', prefix, '-s', suffix]) @@ -186,6 +207,10 @@ def test_main_set_all_short(self): 'Default output prefix is not {0!r}'.format(prefix)) self.assertEqual(spec.output_file_suffix, suffix + '.nc', 'Default output suffix is not {0!r}'.format(suffix)) + self.assertEqual(spec.time_series, None, + 'Default time series names is not None') + self.assertEqual(spec.assume_1d_time_variant_metadata, True, + 'Default 1D time-variant metadata flag is not True') def test_main_set_all_long(self): clevel = 3 @@ -194,13 +219,16 @@ def test_main_set_all_long(self): specfile = 'myspec.s2s' prefix = 'prefix.' suffix = '.suffix' + tseries = ['tsvar1', 'tsvar2'] infiles = ['s2smakeTests.py', 'specificationTests.py'] - argv = ['--compression_level', str(clevel), '--netcdf_format', ncfmt] + argv = ['--meta1d', '--compression_level', str(clevel), '--netcdf_format', ncfmt] for md in metadata: argv.extend(['--metadata', md]) argv.extend(['--specfile', specfile, '--output_prefix', prefix, '--output_suffix', suffix]) + for ts in tseries: + argv.extend(['--time_series', ts]) argv.extend(infiles) if os.path.exists(specfile): @@ -231,6 +259,11 @@ def test_main_set_all_long(self): 'Default output prefix is not {0!r}'.format(prefix)) self.assertEqual(spec.output_file_suffix, suffix + '.nc', 'Default output suffix is not {0!r}'.format(suffix)) + for i1, i2 in zip(spec.time_series, tseries): + self.assertEqual(i1, i2, + 'Default time-series list is not {0}'.format(tseries)) + self.assertEqual(spec.assume_1d_time_variant_metadata, True, + 'Default 1D time-variant metadata flag is not True') if __name__ == "__main__": unittest.main() diff --git 
a/source/pyreshaper/test/s2srunTests.py b/source/pyreshaper/test/s2srunTests.py index af872d9b..b1f2d92d 100644 --- a/source/pyreshaper/test/s2srunTests.py +++ b/source/pyreshaper/test/s2srunTests.py @@ -1,5 +1,5 @@ """ -Copyright 2015, University Corporation for Atmospheric Research +Copyright 2016, University Corporation for Atmospheric Research See LICENSE.txt for details """ @@ -10,6 +10,7 @@ from cStringIO import StringIO from os import linesep as eol from os import remove +from os.path import exists from mpi4py import MPI from pyreshaper.specification import Specifier @@ -18,7 +19,7 @@ s2srun = imp.load_source('s2srun', '../../../scripts/s2srun') -MPI_COMM_WORLD = MPI.COMM_WORLD +MPI_COMM_WORLD = MPI.COMM_WORLD # @UndefinedVariable class s2srunTest(unittest.TestCase): @@ -53,14 +54,18 @@ def _test_header(self, testname): def _convert_header(self, infiles, prefix, suffix, metadata, ncfmt, clevel, serial, verbosity, wmode, once, - print_diags=False): + print_diags=False, meta1d=False, tseries=None): nfiles = len(infiles) ncvers = '3' if ncfmt == 'netcdf' else ('4c' if ncfmt == 'netcdf4c' else '4') - self._test_header(("convert() - {0} infile(s), NC{1}-CL{2}, serial={3},{4}" - " verbosity={5}, wmode={6!r}, once={7}" + if tseries is not None: + tsvstr = ', ntsvar={0}'.format(len(tseries)) + else: + tsvstr = '' + self._test_header(("convert() - {0} infile(s), NC{1}-CL{2}, serial={3}{9},{4}" + " verbosity={5}, wmode={6!r}, once={7}, meta1d={8}" "").format(nfiles, ncvers, clevel, serial, eol, - verbosity, wmode, once)) + verbosity, wmode, once, meta1d, tsvstr)) def _assertion(self, name, actual, expected, data=None, show=True, assertion=None): @@ -78,16 +83,24 @@ def _assertion(self, name, actual, expected, else: self.assertEqual(actual, expected, msg) + def _check_outfile(self, tsvar, **args): + assertions_dict = mkTestData.check_outfile(tsvar=tsvar, **args) + failed_assertions = [key for key, value in assertions_dict.iteritems() if value is False] + assert_msgs = 
['Output file check for variable {0!r}:'.format(tsvar)] + assert_msgs.extend([' {0}'.format(assrt) for assrt in failed_assertions]) + self.assertEqual(len(failed_assertions), 0, eol.join(assert_msgs)) + def _run_main(self, infiles, prefix, suffix, metadata, - ncfmt, clevel, serial, verbosity, wmode, once): + ncfmt, clevel, serial, verbosity, wmode, once, meta1d=False, tseries=None): if not (serial and self.rank > 0): spec = Specifier(infiles=infiles, ncfmt=ncfmt, compression=clevel, - prefix=prefix, suffix=suffix, metadata=metadata) + prefix=prefix, suffix=suffix, timeseries=tseries, + metadata=metadata, meta1d=meta1d) specfile = 'input.s2s' pickle.dump(spec, open(specfile, 'wb')) argv = ['-v', str(verbosity), '-m', wmode] if once: - argv.append('-s') + argv.append('-1') argv.append(specfile) s2srun.main(argv) remove(specfile) @@ -191,7 +204,37 @@ def test_main_All_NC3_CL0_SER_V0_W(self): self._run_main(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) + MPI_COMM_WORLD.Barrier() + + def test_main_All_NC3_CL0_SER_V0_W_M1D(self): + mdata = [v for v in mkTestData.tvmvars] + args = {'infiles': mkTestData.slices, 'prefix': 'out.', 'suffix': '.nc', + 'metadata': mdata, 'meta1d': True, 'ncfmt': 'netcdf', 'clevel': 0, + 'serial': True, 'verbosity': 0, 'wmode': 'w', 'once': False} + self._convert_header(**args) + self._run_main(**args) + if self.rank == 0: + for tsvar in mkTestData.tsvars: + self._check_outfile(tsvar=tsvar, **args) + MPI_COMM_WORLD.Barrier() + + def test_main_All_NC3_CL0_SER_V1_W_TSER(self): + mdata = [v for v in mkTestData.tvmvars] + tsers = mkTestData.tsvars[2:] + args = {'infiles': mkTestData.slices, 'prefix': 'out.', 'suffix': '.nc', + 'metadata': mdata, 'meta1d': True, 'ncfmt': 'netcdf', 'clevel': 0, + 'serial': True, 'verbosity': 1, 'wmode': 'w', 'once': False, + 'tseries': tsers} + self._convert_header(**args) + self._run_main(**args) + if self.rank == 
0: + for tsvar in mkTestData.tsvars: + if tsvar in tsers: + self._check_outfile(tsvar=tsvar, **args) + else: + fname = args['prefix'] + tsvar + args['suffix'] + self.assertFalse(exists(fname), 'File {0!r} should not exist'.format(fname)) MPI_COMM_WORLD.Barrier() def test_main_1_NC3_CL0_SER_V0_W(self): @@ -204,7 +247,7 @@ def test_main_1_NC3_CL0_SER_V0_W(self): self._run_main(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_main_All_NC4_CL1_SER_V0_W(self): @@ -217,7 +260,7 @@ def test_main_All_NC4_CL1_SER_V0_W(self): self._run_main(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_main_All_NC3_CL0_PAR_V1_W(self): @@ -230,7 +273,7 @@ def test_main_All_NC3_CL0_PAR_V1_W(self): self._run_main(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_main_All_NC3_CL0_PAR_V1_W_ONCE(self): @@ -242,9 +285,9 @@ def test_main_All_NC3_CL0_PAR_V1_W_ONCE(self): self._convert_header(**args) self._run_main(**args) if self.rank == 0: - mkTestData.check_outfile(tsvar='once', **args) + self._check_outfile(tsvar='once', **args) for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_main_All_NC3_CL0_PAR_V1_O(self): @@ -257,7 +300,7 @@ def test_main_All_NC3_CL0_PAR_V1_O(self): self._run_main(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_main_All_NC3_CL0_PAR_V1_O_ONCE(self): @@ -269,9 +312,9 @@ def test_main_All_NC3_CL0_PAR_V1_O_ONCE(self): 
self._convert_header(**args) self._run_main(**args) if self.rank == 0: - mkTestData.check_outfile(tsvar='once', **args) + self._check_outfile(tsvar='once', **args) for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_main_All_NC3_CL0_PAR_V1_S(self): @@ -284,7 +327,7 @@ def test_main_All_NC3_CL0_PAR_V1_S(self): self._run_main(**args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_main_All_NC3_CL0_PAR_V1_S_ONCE(self): @@ -296,9 +339,9 @@ def test_main_All_NC3_CL0_PAR_V1_S_ONCE(self): self._convert_header(**args) self._run_main(**args) if self.rank == 0: - mkTestData.check_outfile(tsvar='once', **args) + self._check_outfile(tsvar='once', **args) for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(tsvar=tsvar, **args) + self._check_outfile(tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_main_All_NC3_CL0_PAR_V3_A(self): @@ -313,7 +356,7 @@ def test_main_All_NC3_CL0_PAR_V3_A(self): self._run_main(infiles=[infile], wmode='a', **args) if self.rank == 0: for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) + self._check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def test_main_All_NC3_CL0_PAR_V3_A_ONCE(self): @@ -327,9 +370,9 @@ def test_main_All_NC3_CL0_PAR_V3_A_ONCE(self): for infile in mkTestData.slices[1:]: self._run_main(infiles=[infile], wmode='a', **args) if self.rank == 0: - mkTestData.check_outfile(infiles=mkTestData.slices, tsvar='once', **args) + self._check_outfile(infiles=mkTestData.slices, tsvar='once', **args) for tsvar in mkTestData.tsvars: - mkTestData.check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) + self._check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() def 
test_main_All_NC3_CL0_PAR_V3_A_MISSING(self): @@ -348,9 +391,9 @@ def test_main_All_NC3_CL0_PAR_V3_A_MISSING(self): if self.rank == 0: for tsvar in mkTestData.tsvars: if tsvar == missingvar: - mkTestData.check_outfile(infiles=mkTestData.slices[2:], tsvar=tsvar, **args) + self._check_outfile(infiles=mkTestData.slices[2:], tsvar=tsvar, **args) else: - mkTestData.check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) + self._check_outfile(infiles=mkTestData.slices, tsvar=tsvar, **args) MPI_COMM_WORLD.Barrier() diff --git a/source/pyreshaper/test/specificationTests.py b/source/pyreshaper/test/specificationTests.py index 5282393d..b9cdb0cf 100644 --- a/source/pyreshaper/test/specificationTests.py +++ b/source/pyreshaper/test/specificationTests.py @@ -1,7 +1,7 @@ """ Unit tests for the Specifier class -Copyright 2015, University Corporation for Atmospheric Research +Copyright 2016, University Corporation for Atmospheric Research See the LICENSE.rst file for details """ @@ -32,8 +32,14 @@ def test_init(self): 'Output file prefix not initialized to tseries.') self.assertEqual(spec.output_file_suffix, '.nc', 'Output file prefix not initialized to .nc') + self.assertEqual(spec.time_series, None, + 'Time-series variables list is not initialized to None') self.assertEqual(len(spec.time_variant_metadata), 0, 'Time variant metadata list not initialized to empty') + self.assertEqual(spec.assume_1d_time_variant_metadata, False, + 'Time-variable 1D metadata flag is not initialized to False') + self.assertEqual(spec.io_backend, 'netCDF4', + 'I/O backend not initialized to netCDF4') def test_init_full(self): in_list = ['a', 'b', 'c'] @@ -41,13 +47,19 @@ def test_init_full(self): cl = 4 prefix = 'pre.' 
suffix = '.suf.nc' + tseries = ['1', '2'] metadata = ['x', 'y', 'z'] + meta1d = True + backend = 'Nio' spec = specification.Specifier( infiles=in_list, ncfmt=fmt, compression=cl, prefix=prefix, - suffix=suffix, metadata=metadata) + suffix=suffix, timeseries=tseries, metadata=metadata, + meta1d=meta1d, backend=backend) for i1, i2 in zip(spec.input_file_list, in_list): self.assertEqual(i1, i2, 'Input file list not initialized properly') + self.assertEqual(spec.io_backend, backend, + 'NetCDF I/O backend not set properly') self.assertEqual(spec.netcdf_format, fmt, 'NetCDF format not initialized properly') self.assertEqual(spec.compression_level, cl, @@ -56,9 +68,20 @@ def test_init_full(self): 'Output file prefix not initialized properly') self.assertEqual(spec.output_file_suffix, suffix, 'Output file prefix not initialized properly') - for i1,i2 in zip(spec.time_variant_metadata, metadata): + for i1, i2 in zip(spec.time_series, tseries): self.assertEqual(i1, i2, - 'Time variant metadata list not initialized properly') + 'Time-series list not initialized properly') + for i1, i2 in zip(spec.time_variant_metadata, metadata): + self.assertEqual(i1, i2, + 'Time-variant metadata list not initialized properly') + self.assertEqual(spec.assume_1d_time_variant_metadata, meta1d, + '1D metadata flag not initialized properly') + + + def test_validate_types_defaults(self): + in_list = ['a', 'b', 'c'] + spec = specification.Specifier(infiles=in_list) + spec.validate_types() def test_validate_types(self): in_list = ['a', 'b', 'c'] @@ -66,10 +89,11 @@ def test_validate_types(self): cl = 3 prefix = 'pre.' 
suffix = '.suf.nc' + tseries = ['1', '2'] metadata = ['x', 'y', 'z'] spec = specification.Specifier( infiles=in_list, ncfmt=fmt, compression=cl, prefix=prefix, - suffix=suffix, metadata=metadata) + suffix=suffix, timeseries=tseries, metadata=metadata, meta1d=True) spec.validate_types() def test_validate_types_fail_input(self): @@ -84,6 +108,19 @@ def test_validate_types_fail_input(self): suffix=suffix, metadata=metadata) self.assertRaises(TypeError, spec.validate_types) + def test_validate_types_fail_backend(self): + in_list = ['a', 'b', 'c'] + fmt = 2342 + cl = 5 + prefix = 'pre.' + suffix = '.suf.nc' + metadata = ['x', 'y', 'z'] + backend = 1 + spec = specification.Specifier( + infiles=in_list, ncfmt=fmt, compression=cl, prefix=prefix, + suffix=suffix, metadata=metadata, backend=backend) + self.assertRaises(TypeError, spec.validate_types) + def test_validate_types_fail_format(self): in_list = ['a', 'b', 'c'] fmt = 2342 @@ -132,6 +169,19 @@ def test_validate_types_fail_suffix(self): suffix=suffix, metadata=metadata) self.assertRaises(TypeError, spec.validate_types) + def test_validate_types_fail_timeseries(self): + in_list = ['a', 'b', 'c'] + fmt = 'netcdf' + cl = 5 + prefix = 'pre.' + suffix = '.suf.nc' + tseries = ['1', 2.5] + metadata = ['x', 'y', 'z'] + spec = specification.Specifier( + infiles=in_list, ncfmt=fmt, compression=cl, prefix=prefix, + suffix=suffix, timeseries=tseries, metadata=metadata) + self.assertRaises(TypeError, spec.validate_types) + def test_validate_types_fail_metadata(self): in_list = ['a', 'b', 'c'] fmt = 'netcdf' @@ -144,6 +194,19 @@ def test_validate_types_fail_metadata(self): suffix=suffix, metadata=metadata) self.assertRaises(TypeError, spec.validate_types) + def test_validate_types_fail_meta1d(self): + in_list = ['a', 'b', 'c'] + fmt = 'netcdf' + cl = 5 + prefix = 'pre.' 
+ suffix = '.suf.nc' + metadata = ['x', 'y', 'z'] + meta1d = 't' + spec = specification.Specifier( + infiles=in_list, ncfmt=fmt, compression=cl, prefix=prefix, + suffix=suffix, meta1d=meta1d, metadata=metadata) + self.assertRaises(TypeError, spec.validate_types) + def test_validate_values_fail_input(self): in_list = ['a', 'b', 'c'] fmt = 'netcdf' @@ -157,6 +220,20 @@ def test_validate_values_fail_input(self): spec.validate_types() self.assertRaises(ValueError, spec.validate_values) + def test_validate_values_fail_backend(self): + in_list = ['timekeeperTests.py', 'messengerTests.py'] + fmt = 'netcdf9' + cl = 5 + prefix = 'pre.' + suffix = '.suf.nc' + metadata = [] + backend = 'x' + spec = specification.Specifier( + infiles=in_list, ncfmt=fmt, compression=cl, prefix=prefix, + suffix=suffix, metadata=metadata, backend=backend) + spec.validate_types() + self.assertRaises(ValueError, spec.validate_values) + def test_validate_values_fail_format(self): in_list = ['timekeeperTests.py', 'messengerTests.py'] fmt = 'netcdf9' @@ -215,15 +292,16 @@ def test_write(self): cl = 8 prefix = 'pre.' 
suffix = '.suf.nc' + tseries = ['1', '2'] metadata = ['time'] spec = specification.Specifier( infiles=in_list, ncfmt=fmt, compression=cl, prefix=prefix, - suffix=suffix, metadata=metadata) + suffix=suffix, timeseries=tseries, metadata=metadata) fname = 'test_write.s2s' spec.write(fname) self.assertTrue(os.path.exists(fname), 'Specfile failed to write') spec2 = pickle.load(open(fname, 'r')) - for i1,i2 in zip(spec2.input_file_list, in_list): + for i1, i2 in zip(spec2.input_file_list, in_list): self.assertEqual(i1, i2, 'Input file list not initialized properly') self.assertEqual(spec2.netcdf_format, fmt, @@ -234,7 +312,10 @@ def test_write(self): 'Output file prefix not initialized properly') self.assertEqual(spec2.output_file_suffix, suffix, 'Output file prefix not initialized properly') - for i1,i2 in zip(spec2.time_variant_metadata, metadata): + for i1, i2 in zip(spec2.time_series, tseries): + self.assertEqual(i1, i2, + 'Time series name list not initialized properly') + for i1, i2 in zip(spec2.time_variant_metadata, metadata): self.assertEqual(i1, i2, 'Time variant metadata list not initialized properly') os.remove(fname) diff --git a/source/pyreshaper/version.py b/source/pyreshaper/version.py index de9076fd..d6f3b153 100644 --- a/source/pyreshaper/version.py +++ b/source/pyreshaper/version.py @@ -1,2 +1,2 @@ # Single place for version information -__version__ = '0.9.10' +__version__ = '1.0.0' diff --git a/tests/yellowstone/.gitignore b/tests/yellowstone/.gitignore new file mode 100644 index 00000000..0482cb4e --- /dev/null +++ b/tests/yellowstone/.gitignore @@ -0,0 +1,2 @@ +results + diff --git a/tests/yellowstone/checkresults.py b/tests/yellowstone/checkresults.py index 23ffa003..490a2fbc 100755 --- a/tests/yellowstone/checkresults.py +++ b/tests/yellowstone/checkresults.py @@ -10,7 +10,7 @@ import re import sys import glob -import argparse +import optparse from subprocess import Popen, PIPE, STDOUT, call # ASAP Toolbox Modules @@ -23,33 +23,27 @@ # 
Command-Line Interface Definition #============================================================================== _DESC_ = 'Check the results of tests found in the rundirs directory.' -_PARSER_ = argparse.ArgumentParser(description=_DESC_) -_PARSER_.add_argument('-c', '--code', default='STDD0002', type=str, - help='The name of the project code for charging in ' - 'parallel runs (ignored if running in serial) ' - '[Default: STDD0002]') -_PARSER_.add_argument('-i', '--infofile', default='testinfo.json', - help='Location of the testinfo.json file ' - '[Default: testinfo.json]') -_PARSER_.add_argument('-l', '--list', default=False, - action='store_true', dest='list_tests', - help='True or False, indicating whether to list all ' - 'tests that have been run with resulting output, ' - 'instead of actually comparing any tests. ' - '[Default: False]') -_PARSER_.add_argument('-m', '--multiple', default=False, - action='store_true', dest='multispec', - help='True or False, indicating whether to look for ' - 'multispec results [Default: False]') -_PARSER_.add_argument('-s', '--serial', default=False, - action='store_true', dest='serial', - help='True or False, indicating whether to run checks ' - 'serial or not [Default: False]') -_PARSER_.add_argument('-x', '--executable', type=str, - default='/glade/p/work/kpaul/installs/intel/12.1.5/cprnc/bin/cprnc', - help='The path to the CPRNC executable.') -_PARSER_.add_argument('rundir', type=str, nargs='*', - help='Name of a test run directory to check') +_PARSER_ = optparse.OptionParser(description=_DESC_) +_PARSER_.add_option('-c', '--code', default='STDD0002', type='string', + help=('The name of the project code for charging in ' + 'parallel runs (ignored if running in serial) ' + '[Default: STDD0002]')) +_PARSER_.add_option('-i', '--infofile', default='testinfo.json', + help=('Location of the testinfo.json file ' + '[Default: testinfo.json]')) +_PARSER_.add_option('-l', '--list', default=False, + action='store_true', 
dest='list_tests', + help=('True or False, indicating whether to list all ' + 'tests that have been run with resulting output, ' + 'instead of actually comparing any tests. ' + '[Default: False]')) +_PARSER_.add_option('-s', '--serial', default=False, + action='store_true', dest='serial', + help=('True or False, indicating whether to run checks ' + 'serial or not [Default: False]')) +_PARSER_.add_option('-x', '--executable', type='string', + default='/glade/p/work/kpaul/installs/intel/12.1.5/cprnc/bin/cprnc', + help='The path to the CPRNC executable.') #============================================================================== @@ -139,24 +133,20 @@ def compare(self, nc1, nc2, outfile=None, alltimes=True, verbose=False): # Command-Line Operation #============================================================================== if __name__ == '__main__': - args = _PARSER_.parse_args() + opts, args = _PARSER_.parse_args() # Create/read the testing info and stats files - testdb = tt.TestDB(name=args.infofile).getdb() + testdb = tt.TestDB(name=opts.infofile).getdb() # Get a list of valid rundir names to look for - if len(args.rundir) > 0: - rundirs = args.rundir + if len(args) > 0: + rundirs = args else: rundirs = glob.glob(os.path.join('results', '*', '[ser,par]*', '*')) # Get the list of valid run names and the output directory pattern - if args.multispec: - valid_runnames = ['multitest'] - outdir_pattern = os.path.join('output', '*') - else: - valid_runnames = testdb.keys() - outdir_pattern = 'output' + valid_runnames = testdb.keys() + outdir_pattern = 'output' # Find valid tests for comparison tests_to_check = {} @@ -235,15 +225,11 @@ def compare(self, nc1, nc2, outfile=None, alltimes=True, verbose=False): unchecked_old_items.append(item_dict) # Get a basic MPI comm - comm = create_comm(serial=(args.serial or args.list_tests)) + comm = create_comm(serial=(opts.serial or opts.list_tests)) # Print tests that will be checked if comm.is_manager(): - if args.multispec: 
- print 'Checking multitest results.' - else: - print 'Checking individual test results.' - print + print 'Checking test results.' for test_name in tests_to_check: print 'Test {0!s}:'.format(test_name) @@ -257,7 +243,7 @@ def compare(self, nc1, nc2, outfile=None, alltimes=True, verbose=False): print 'old files found.' # Quit now, if just listing tests to be checked - if args.list_tests: + if opts.list_tests: sys.exit(1) # For each test to be compared, generate the cprnc output directories @@ -272,7 +258,7 @@ def compare(self, nc1, nc2, outfile=None, alltimes=True, verbose=False): comm.sync() # Create the CPRNC object - cprnc = CPRNC(args.executable) + cprnc = CPRNC(opts.executable) # For each file on this partition, do the CPRNC Comparison local_results = [] diff --git a/tests/yellowstone/init.sh b/tests/yellowstone/init.sh index ffe96f1b..6e4d0102 100755 --- a/tests/yellowstone/init.sh +++ b/tests/yellowstone/init.sh @@ -1,5 +1,8 @@ #!/bin/bash +module load python +module load all-python-libs + # Function to return the absolute path function abspath { cd $1 diff --git a/tests/yellowstone/mkplots.py b/tests/yellowstone/mkplots.py index 5a5d2d8e..57c85640 100755 --- a/tests/yellowstone/mkplots.py +++ b/tests/yellowstone/mkplots.py @@ -20,31 +20,43 @@ # #============================================================================== -import argparse +import optparse +import numpy from utilities import plottools as pt #============================================================================== # Command-Line Interface Definition #============================================================================== -_DESC_ = 'Create throughput and duration plots of timing data' -_PARSER_ = argparse.ArgumentParser(description=_DESC_) -_PARSER_.add_argument('-t', '--timefile', type=str, - default='timings.json', - help='Path to the timings.json file ' - '[Default: "timings.json"]') -_PARSER_.add_argument('-m', '--method', action='append', type=str, - dest='methods', 
default=[], - help='Include a method to plot, by name. If no methods ' - 'are listed, then include all methods found') -_PARSER_.add_argument('-x', '--exclusive', action='store_true', - dest='exclusive', default=False, - help='Option indicating that datasets that do not ' - 'include all of the desired methods should not ' - 'be plotted [Default: False]') -_PARSER_.add_argument('dataset', nargs='*', default=[], - help='Dataset to be plotted. If none are listed, ' - 'assume all datasets found will be plotted.') +_USAGE_ = 'Usage: %prog [options] [DATASET1 [DATASET2 [...]]]' + +_DESC_ = """Create throughput and duration plots of timing data""" + +_PARSER_ = optparse.OptionParser(description=_DESC_) +_PARSER_.add_option('-d', '--data', action='append', type='string', + dest='data', default=[], + help=('Only plot jobs that match the given data ' + '(in the format NAME,TYPE,VALUE, where TYPE ' + 'can be int, float, str, or bool)')) +_PARSER_.add_option('-f', '--func', type='string', default='latest', + help=('Which function to use when determining which ' + 'jobs to plot for each dataset and method. Can ' + 'be average, latest, or first')) +_PARSER_.add_option('-l', '--log', action='store_true', default=False, + help='Plot graphs on a log-scale') +_PARSER_.add_option('-m', '--method', action='append', type='string', + dest='methods', default=[], + help=('Include a method to plot, by name. 
If no methods ' + 'are listed, then include all methods found')) +_PARSER_.add_option('-t', '--timefile', type='string', + default='timings.json', + help=('Path to the timings.json file ' + '[Default: "timings.json"]')) +_PARSER_.add_option('-x', '--exclusive', action='store_true', + dest='exclusive', default=False, + help=('Option indicating that datasets that do not ' + 'include all of the desired methods should not ' + 'be plotted [Default: False]')) #============================================================================== # Some common data for plotting @@ -69,6 +81,9 @@ 'pyniompi4_1': 'PyNIO+mpi4py (NetCDF4-CL1)', 'pagoda': 'Pagoda', 'cdo': 'CDO', + 'pyreshaper-v0': 'PyReshaper v0.9 (NetCDF3)', + 'pyreshaper4-v0': 'PyReshaper v0.9 (NetCDF4)', + 'pyreshaper4c-v0': 'PyReshaper v0.9 (NetCDF4-CL1)', 'pyreshaper': 'PyReshaper (NetCDF3)', 'pyreshaper4': 'PyReshaper (NetCDF4)', 'pyreshaper4c': 'PyReshaper (NetCDF4-CL1)'} @@ -83,6 +98,9 @@ 'pyniompi4_1': 'blue', 'pagoda': 'yellow', 'cdo': 'cyan', + 'pyreshaper-v0': 'magenta', + 'pyreshaper4-v0': 'red', + 'pyreshaper4c-v0': 'cyan', 'pyreshaper': 'purple', 'pyreshaper4': 'green', 'pyreshaper4c': 'blue'} @@ -90,25 +108,45 @@ 'pynio', 'pynio4_0', 'pynio4_1', 'ncr', 'pyniompi', 'pyniompi4_0', 'pyniompi4_1', + 'pyreshaper-v0', 'pyreshaper4-v0', 'pyreshaper4c-v0', 'pyreshaper', 'pyreshaper4', 'pyreshaper4c'] +data_types = {'int': int, + 'float': float, + 'str': str, + 'bool': bool} #============================================================================== # Command-line Operation #============================================================================== if __name__ == '__main__': - args = _PARSER_.parse_args() + opts, datasets = _PARSER_.parse_args() + + # Reduce function + if opts.func.lower() == 'average': + reduce_func = numpy.average + elif opts.func.lower() == 'latest': + reduce_func = lambda x: x[-1] + elif opts.func.lower() == 'first': + reduce_func = lambda x: x[0] + else: + raise ValueError('Reduce 
function {0} is not average, latest, or first'.format(opts.func)) # Read the data file - jsondata = pt.read_json_data(args.timefile) + jsondata = pt.read_json_data(opts.timefile) if jsondata is None: raise ValueError('Could not find timings JSON data file.') + + # Parse the subselection data options + subdata = {} + for ds in opts.data: + dname, dtype, dval = ds.split(',') + subdata[dname] = data_types[dtype.lower()](dval) # Initialize the data to the entire file contents data = jsondata # If datasets are listed, extract only them - datasets = args.dataset if (len(datasets) > 0): datasets_to_plot = [] for dataset in datasets: @@ -117,31 +155,49 @@ data = pt.subselect_datasets(data, datasets=datasets_to_plot) # If methods are listed, extract only them - methods = args.methods + methods = opts.methods if (len(methods) > 0): methods_to_plot = [] for method in methods: if method in method_order: methods_to_plot.append(method) data = pt.subselect_methods(data, methods=methods_to_plot, - exclusive=args.exclusive) + exclusive=opts.exclusive) + + # Subselect the jobs by time and data criteria + data = pt.subselect_jobs_by_data(data, [subdata]) # THROUGHPUT PLOTS - tdata = pt.get_throughput_pdata(data) + tdata = pt.reduce_pdata(pt.get_throughput_pdata(data), func=reduce_func) pt.make_bar_plot(tdata, 'throughput.pdf', title='Throughput', ylabel='Throughput [MB/sec]', dataset_order=dataset_order, method_order=method_order, method_colors=method_colors, dataset_labels=dataset_labels, - method_labels=method_labels) + method_labels=method_labels, + logplot=opts.log) # DURATION PLOTS - ddata = pt.get_duration_pdata(data) + ddata = pt.reduce_pdata(pt.get_duration_pdata(data), func=reduce_func) pt.make_bar_plot(ddata, 'duration.pdf', title='Duration', ylabel='Duration [min]', dataset_order=dataset_order, method_order=method_order, method_colors=method_colors, dataset_labels=dataset_labels, - method_labels=method_labels) + method_labels=method_labels, + logplot=opts.log) + + # 
SPEEDUP PLOTS + over_method = methods_to_plot[0] + sdata = pt.get_speedup_pdata(tdata, over_method) + pt.make_bar_plot(sdata, 'speedup.pdf', + title='Speedup over {0}'.format(method_labels[over_method]), + ylabel='Speedup', + dataset_order=dataset_order, + method_order=method_order, + method_colors=method_colors, + dataset_labels=dataset_labels, + method_labels=method_labels, + logplot=opts.log) diff --git a/tests/yellowstone/mkstats.py b/tests/yellowstone/mkstats.py index 4fc0b044..71eaa427 100755 --- a/tests/yellowstone/mkstats.py +++ b/tests/yellowstone/mkstats.py @@ -9,7 +9,7 @@ # Builtin Modules import sys -import argparse +import optparse # Package Modules from utilities import testtools as tt @@ -17,60 +17,61 @@ #============================================================================== # Command-Line Interface Definition #============================================================================== +_USAGE_ = 'Usage: %prog [options] [TEST1 [TEST2 [...]]]' + _DESC_ = """This program is designed to gather statistics for tests and - test input defined in the testing database file.""" +test input defined in the testing database file. +""" -_PARSER_ = argparse.ArgumentParser(description=_DESC_) -_PARSER_.add_argument('-a', '--all', default=False, - action='store_true', dest='all_tests', - help='True or False, indicating whether to run all ' - 'tests [Default: False]') -_PARSER_.add_argument('-i', '--infofile', default='testinfo.json', type=str, - help='Location of the testinfo.json database file ' - '[Default: testinfo.json]') -_PARSER_.add_argument('-l', '--list', default=False, - action='store_true', dest='list_tests', - help='True or False, indicating whether to list all ' - 'tests, instead of running tests. 
[Default: False]') -_PARSER_.add_argument('-o', '--overwrite', default=False, - action='store_true', dest='overwrite', - help='True or False, indicating whether to force ' - 'deleting any existing test or run directories, ' - 'if found [Default: False]') -_PARSER_.add_argument('-s', '--statsfile', default='teststats.json', type=str, - help='Location of the teststats.json database file ' - '[Default: teststats.json]') -_PARSER_.add_argument('test', type=str, nargs='*', - help='Name of test to analyze') +_PARSER_ = optparse.OptionParser(description=_DESC_) +_PARSER_.add_option('-a', '--all', default=False, + action='store_true', dest='all_tests', + help=('True or False, indicating whether to run all ' + 'tests [Default: False]')) +_PARSER_.add_option('-i', '--infofile', default='testinfo.json', type='string', + help=('Location of the testinfo.json database file ' + '[Default: testinfo.json]')) +_PARSER_.add_option('-l', '--list', default=False, + action='store_true', dest='list_tests', + help=('True or False, indicating whether to list all ' + 'tests, instead of running tests. 
[Default: False]')) +_PARSER_.add_option('-o', '--overwrite', default=False, + action='store_true', dest='overwrite', + help=('True or False, indicating whether to force ' + 'deleting any existing test or run directories, ' + 'if found [Default: False]')) +_PARSER_.add_option('-s', '--statsfile', default='teststats.json', type='string', + help=('Location of the teststats.json database file ' + '[Default: teststats.json]')) #============================================================================== # Main Command-line Operation #============================================================================== if __name__ == '__main__': - args = _PARSER_.parse_args() + opts, args = _PARSER_.parse_args() # Check for tests to analyze - if len(args.test) == 0 and not args.all_tests and not args.list_tests: + if len(args) == 0 and not opts.all_tests and not opts.list_tests: _PARSER_.print_help() sys.exit(1) # Create/read the testing info and stats files - testdb = tt.TestDB(name=args.infofile) - statdb = tt.StatDB(name=args.statsfile) + testdb = tt.TestDB(name=opts.infofile) + statdb = tt.StatDB(name=opts.statsfile) # List tests if only listing - if args.list_tests: + if opts.list_tests: testdb.display() sys.exit(1) # Generate the list of tests to run/analyze - if args.all_tests: + if opts.all_tests: test_list = testdb.getdb().keys() else: - test_list = [t for t in args.test if t in testdb.getdb()] + test_list = [t for t in args if t in testdb.getdb()] # Analyze test input, if requested (overwrite forces re-analysis) - statdb.analyze(testdb, tests=test_list, force=args.overwrite) + statdb.analyze(testdb, tests=test_list, force=opts.overwrite) # Save to the stats file - statdb.save(name=args.statsfile) + statdb.save(name=opts.statsfile) diff --git a/tests/yellowstone/mktimings.py b/tests/yellowstone/mktimings.py index af4e1839..dfc2c82c 100755 --- a/tests/yellowstone/mktimings.py +++ b/tests/yellowstone/mktimings.py @@ -11,7 +11,7 @@ import os import glob import datetime 
-import argparse +import optparse # Package Modules from utilities import testtools as tt @@ -20,15 +20,19 @@ # Command-Line Interface Definition #============================================================================== _DESC_ = """This program is designed to gather statistics for tests and - test input defined in the testing database file.""" +test input defined in the testing database file. +""" -_PARSER_ = argparse.ArgumentParser(description=_DESC_) -_PARSER_.add_argument('-i', '--infofile', default='testinfo.json', type=str, - help='Location of the testinfo.json database file ' - '[Default: testinfo.json]') -_PARSER_.add_argument('-t', '--timefile', default='timings.json', type=str, - help='Location of the timings.json database file ' - '[Default: timings.json]') +_PARSER_ = optparse.OptionParser(description=_DESC_) +_PARSER_.add_option('-i', '--infofile', default='testinfo.json', type="string", + help=('Location of the testinfo.json database file ' + '[Default: testinfo.json]')) +_PARSER_.add_option('-t', '--timefile', default='timings.json', type="string", + help=('Location of the input timings.json database file ' + '[Default: timings.json]')) +_PARSER_.add_option('-o', '--outfile', default='timings.json', type="string", + help=('The name of the output timings file to write.' 
+ '[Default: timings.json]')) #============================================================================== @@ -47,11 +51,11 @@ def find_shortest_str(strng, left, right=os.linesep, loc=0): # Command-line Operation #============================================================================== if __name__ == '__main__': - args = _PARSER_.parse_args() + opts, args = _PARSER_.parse_args() # Create/read the testing info and stats files - testdb = tt.TestDB(name=args.infofile).getdb() - timedb = tt.TimeDB(name=args.timefile) + testdb = tt.TestDB(name=opts.infofile).getdb() + timedb = tt.TimeDB(name=opts.timefile) # Current working directory cwd = os.getcwd() @@ -188,4 +192,4 @@ def find_shortest_str(strng, left, right=os.linesep, loc=0): system='yellowstone') # Write the JSON data file - timedb.save(args.timefile) + timedb.save(opts.outfile) diff --git a/tests/yellowstone/runtests.py b/tests/yellowstone/runtests.py index 2b339064..87cd18d0 100755 --- a/tests/yellowstone/runtests.py +++ b/tests/yellowstone/runtests.py @@ -13,7 +13,7 @@ import sys import stat import shutil -import argparse +import optparse import cPickle as pickle # Package Modules @@ -23,75 +23,64 @@ #============================================================================== # Command-Line Interface Definition #============================================================================== +_USAGE_ = 'Usage: %prog [options] [TEST1 [TEST2 [...]]]' + _DESC_ = """This program is designed to run yellowstone-specific - tests of the PyReshaper. Each named test (or all tests if - the -a or --all option is used) will be given a run - directory in the "rundirs" directory with the same - name as the test itself. The run script will be placed - in this run directory, as will be placed the run output - error file. 
All output data files will be placed in the - output subdirectory.""" - -_PARSER_ = argparse.ArgumentParser(description=_DESC_) -_PARSER_.add_argument('-a', '--all', default=False, - action='store_true', dest='all_tests', - help='True or False, indicating whether to run all ' - 'tests [Default: False]') -_PARSER_.add_argument('-c', '--code', default='STDD0002', type=str, - help='The name of the project code for charging in ' - 'parallel runs (ignored if running in serial) ' - '[Default: STDD0002]') -_PARSER_.add_argument('-i', '--infofile', default='testinfo.json', type=str, - help='Location of the testinfo.json database file ' - '[Default: testinfo.json]') -_PARSER_.add_argument('-f', '--format', default='netcdf4c', - type=str, dest='ncformat', - help='The NetCDF file format to use for the output ' - 'data produced by the test. [Default: netcdf4c]') -_PARSER_.add_argument('-l', '--list', default=False, - action='store_true', dest='list_tests', - help='True or False, indicating whether to list all ' - 'tests, instead of running tests. 
[Default: False]') -_PARSER_.add_argument('-m', '--multiple', default=False, - action='store_true', dest='multispec', - help='True or False, indications whether the tests ' - 'should be run from a single Reshaper submission ' - '(i.e., multiple Specifiers in one run) ' - '[Default: False]') -_PARSER_.add_argument('-n', '--nodes', default=0, type=int, - help='The integer number of nodes to request in parallel' - ' runs (0 means run in serial) [Default: 0]') -_PARSER_.add_argument('-o', '--overwrite', default=False, - action='store_true', dest='overwrite', - help='True or False, indicating whether to force ' - 'deleting any existing test or run directories, ' - 'if found [Default: False]') -_PARSER_.add_argument('-q', '--queue', default='economy', type=str, - help='The name of the queue to request in parallel runs ' - '(ignored if running in serial) ' - '[Default: economy]') -_PARSER_.add_argument('-s', '--skip_existing', default=False, - action='store_true', - help='Whether to skip time-series generation for ' - 'variables with existing output files. ' - '[Default: False]') -_PARSER_.add_argument('-t', '--tiling', default=16, type=int, - help='The integer number of processes per node to ' - 'request in parallel runs (ignored if running ' - 'in serial) [Default: 16]') -_PARSER_.add_argument('-w', '--wtime', default=240, type=int, - help='The number of minutes to request for the wall ' - 'clock in parallel runs (ignored if running in ' - 'serial) [Default: 240]') -_PARSER_.add_argument('test', type=str, nargs='*', - help='Name of test to run') +tests of the PyReshaper. Each named test (or all tests if +the -a or --all option is used) will be given a run +directory in the "rundirs" directory with the same +name as the test itself. The run script will be placed +in this run directory, as will be placed the run output +error file. All output data files will be placed in the +output subdirectory. 
+""" + +_PARSER_ = optparse.OptionParser(usage=_USAGE_, description=_DESC_) +_PARSER_.add_option('-a', '--all', default=False, + action='store_true', dest='all_tests', + help=('True or False, indicating whether to run all ' + 'tests [Default: False]')) +_PARSER_.add_option('-c', '--code', default='STDD0002', type=str, + help=('The name of the project code for charging in ' + 'parallel runs (ignored if running in serial) ' + '[Default: STDD0002]')) +_PARSER_.add_option('-i', '--infofile', default='testinfo.json', type=str, + help=('Location of the testinfo.json database file ' + '[Default: testinfo.json]')) +_PARSER_.add_option('-f', '--format', default='netcdf4c', + type=str, dest='ncformat', + help=('The NetCDF file format to use for the output ' + 'data produced by the test. [Default: netcdf4c]')) +_PARSER_.add_option('-l', '--list', default=False, + action='store_true', dest='list_tests', + help=('True or False, indicating whether to list all ' + 'tests, instead of running tests. [Default: False]')) +_PARSER_.add_option('-m', '--wmode', default='o', dest='wmode', + help=("Output file write mode: 'o' to overwrite, 'w' for " + "normal operation, 'a' to append to existing file, 's' " + "to skip existing files [Default: 'o']")) +_PARSER_.add_option('-n', '--nodes', default=0, type=int, + help=('The integer number of nodes to request in parallel' + ' runs (0 means run in serial) [Default: 0]')) +_PARSER_.add_option('-q', '--queue', default='economy', type=str, + help=('The name of the queue to request in parallel runs ' + '(ignored if running in serial) ' + '[Default: economy]')) +_PARSER_.add_option('-t', '--tiling', default=16, type=int, + help=('The integer number of processes per node to ' + 'request in parallel runs (ignored if running ' + 'in serial) [Default: 16]')) +_PARSER_.add_option('-w', '--wtime', default=240, type=int, + help=('The number of minutes to request for the wall ' + 'clock in parallel runs (ignored if running in ' + 'serial) [Default: 240]')) 
#============================================================================== # Write an executable Python script to run the Reshaper #============================================================================== def write_pyscript(testnames, scriptname='runscript.py', verbosity=3, - serial=False, skip_existing=False, overwrite=False): + serial=False, wmode='o', chunks=None): """ Write an executable Python script to run the PyReshaper with a set of specs @@ -100,10 +89,10 @@ def write_pyscript(testnames, scriptname='runscript.py', verbosity=3, scriptname (str): Name of the Python script to write verbosity (int): Level of output verbosity serial (bool): Whether to run in serial (True) or not (False) - skip_existing (bool): Whether to skip the generation of existing - time-series files (True) or not (False) - overwrite (bool): Whether to overwrite existing time-series files - (True) or not (False) + wmode (str): The mode to use when writing time-series files ('o' + to overwrite, 'a' to append to existing files, 'w' to write + new files, 's' to skip existing files) + chunks (dict): The dimensional chunking sizes to Read/Write operations """ # Start defining the Python script @@ -120,19 +109,23 @@ def write_pyscript(testnames, scriptname='runscript.py', verbosity=3, # Check for single or multiple specifiers if isinstance(testnames, (str, unicode)): pyscript_list.append( - 'specs = pickle.load(open("{0!s}.spec", "rb"))'.format(testnames)) + 'specs = pickle.load(open("{0!s}.s2s", "rb"))'.format(testnames)) elif isinstance(testnames, (list, tuple)): pyscript_list.append('specs = {}') for testname in testnames: pyscript_list.append( - 'specs["{0!s}"] = pickle.load(open("{0!s}.spec", "rb"))'.format(testname)) + 'specs["{0!s}"] = pickle.load(open("{0!s}.s2s", "rb"))'.format(testname)) + # Read the chunking information + pyscript_list.extend(['', + 'chunks = {0!s}'.format(chunks), + '']) + # Define the rest of the python script pyscript_list.extend([ - 'rshpr = 
reshaper.create_reshaper(specs, serial={0!s}, ' - 'verbosity={1!s}, skip_existing={2!s}, overwrite={3!s})'.format( - serial, verbosity, skip_existing, overwrite), - 'rshpr.convert()', + ('rshpr = reshaper.create_reshaper(specs, serial={0!s}, ' + 'verbosity={1!s}, wmode={2!r})').format(serial, verbosity, wmode), + 'rshpr.convert(chunks=chunks)', 'rshpr.print_diagnostics()', '']) @@ -145,113 +138,25 @@ def write_pyscript(testnames, scriptname='runscript.py', verbosity=3, os.chmod(scriptname, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH) -#============================================================================== -# Run a single multitest (using a MultiSpecReshaper) -#============================================================================== -def runmultitest(tests, nodes=0, tiling=16, minutes=120, - queue='economy', project='STDD0002', - skip_existing=False, overwrite=False, ncformat='netcdf4c'): - """ - Run a set of tests - - Parameters: - tests (list, tuple): List or tuple of test names to run - nodes (int): Number of nodes to run the test(s) with - tiling (int): Number of processes per node to run the test(s) with - minutes (int): Number of minutes to run the test(s) for - queue (str): Name of the queue to submit the job(s) to - project (str): Name of the project to charge the job time to - skip_existing (bool): Whether to skip the generation of existing - time-series files (True) or not (False) - overwrite (bool): Whether to overwrite existing time-series files - (True) or not (False) - ncformat (str): NetCDF format for the output - """ - - print 'Running tests in single submission:' - for test_name in tests: - print ' {0!s}'.format(test_name) - print - - # Set the test directory - if nodes > 0: - runtype = 'par{0!s}x{1!s}'.format(nodes, tiling) - else: - runtype = 'ser' - testdir = os.path.abspath( - os.path.join('results', 'multitest', runtype, ncformat)) - - # If the test directory doesn't exist, make it and move into it - cwd = os.getcwd() - if 
os.path.exists(testdir): - if overwrite: - shutil.rmtree(testdir) - else: - print "Already exists. Skipping." - return - if not os.path.exists(testdir): - os.makedirs(testdir) - os.chdir(testdir) - - # Create a separate output directory and specifier for each test - for test_name in tests: - - # Set the output directory - outputdir = os.path.join(testdir, 'output', str(test_name)) - - # If the output directory doesn't exists, create it - if not os.path.exists(outputdir): - os.makedirs(outputdir) - - # Create the specifier and write to file (specfile) - testspec = testdb.create_specifier(test_name=str(test_name), - ncfmt=ncformat, - outdir=outputdir) - testspecfile = str(test_name) + '.spec' - pickle.dump(testspec, open(testspecfile, 'wb')) - - # Write the Python executable to be run - pyscript_name = 'multitest.py' - write_pyscript(testnames=tests, scriptname=pyscript_name, - serial=(nodes <= 0), skip_existing=skip_existing, - overwrite=overwrite) - - # Generate the command and arguments - if nodes > 0: - runcmd = 'poe ./{0!s}'.format(pyscript_name) - else: - runcmd = './{0!s}'.format(pyscript_name) - - # Create and start the job - job = rt.Job(runcmds=[runcmd], nodes=nodes, - name='multitest', tiling=tiling, - minutes=minutes, queue=queue, - project=project) - job.start() - - os.chdir(cwd) - - #============================================================================== # Run tests individually #============================================================================== -def runindivtests(tests, nodes=0, tiling=16, minutes=120, - queue='economy', project='STDD0002', - skip_existing=False, overwrite=False, ncformat='netcdf4c'): +def runtests(tests, testdb, nodes=0, tiling=16, minutes=120, queue='economy', + project='STDD0002', wmode='o', ncformat='netcdf4c'): """ Run a set of tests Parameters: tests (list, tuple): List or tuple of test names to run + testdb (TestDB): The testing database nodes (int): Number of nodes to run the test(s) with tiling (int): Number 
of processes per node to run the test(s) with minutes (int): Number of minutes to run the test(s) for queue (str): Name of the queue to submit the job(s) to project (str): Name of the project to charge the job time to - skip_existing (bool): Whether to skip the generation of existing - time-series files (True) or not (False) - overwrite (bool): Whether to overwrite existing time-series files - (True) or not (False) + wmode (str): The mode to use when writing time-series files ('o' + to overwrite, 'a' to append to existing files, 'w' to write + new files, 's' to skip existing files) ncformat (str): NetCDF format for the output """ @@ -270,9 +175,9 @@ def runindivtests(tests, nodes=0, tiling=16, minutes=120, # If the test directory doesn't exist, make it and move into it if os.path.exists(testdir): - if overwrite: + if wmode == 'w': shutil.rmtree(testdir) - else: + elif wmode == 's': print " Already exists. Skipping." continue if not os.path.exists(testdir): @@ -290,14 +195,16 @@ def runindivtests(tests, nodes=0, tiling=16, minutes=120, testspec = testdb.create_specifier(test_name=str(test_name), ncfmt=ncformat, outdir=outputdir) - testspecfile = '{0!s}.spec'.format(test_name) + testspecfile = '{0!s}.s2s'.format(test_name) pickle.dump(testspec, open(testspecfile, 'wb')) + # Get chunk sizes + chunks = testdb.getdb()[test_name].get('chunks', None) + # Write the Python executable to be run pyscript_name = '{0!s}.py'.format(test_name) write_pyscript(testnames=test_name, scriptname=pyscript_name, - serial=(nodes <= 0), skip_existing=skip_existing, - overwrite=overwrite) + serial=(nodes <= 0), wmode=wmode, chunks=chunks) # Generate the command and arguments if nodes > 0: @@ -319,34 +226,27 @@ def runindivtests(tests, nodes=0, tiling=16, minutes=120, # Main Command-line Operation #============================================================================== if __name__ == '__main__': - args = _PARSER_.parse_args() + opts, args = _PARSER_.parse_args() # Check for tests to 
analyze - if len(args.test) == 0 and not args.all_tests and not args.list_tests: + if len(args) == 0 and not opts.all_tests and not opts.list_tests: _PARSER_.print_help() sys.exit(1) # Create/read the testing info and stats files - testdb = tt.TestDB(name=args.infofile) + testdb = tt.TestDB(name=opts.infofile) # List tests if only listing - if args.list_tests: + if opts.list_tests: testdb.display() sys.exit(1) # Generate the list of tests to run/analyze - if args.all_tests: + if opts.all_tests: test_list = testdb.getdb().keys() else: - test_list = [t for t in args.test if t in testdb.getdb()] + test_list = [t for t in args if t in testdb.getdb()] - if args.multispec: - runmultitest(test_list, nodes=args.nodes, tiling=args.tiling, - minutes=args.wtime, queue=args.queue, project=args.code, - skip_existing=args.skip_existing, overwrite=args.overwrite, - ncformat=args.ncformat) - else: - runindivtests(test_list, nodes=args.nodes, tiling=args.tiling, - minutes=args.wtime, queue=args.queue, project=args.code, - skip_existing=args.skip_existing, overwrite=args.overwrite, - ncformat=args.ncformat) + runtests(test_list, testdb, nodes=opts.nodes, tiling=opts.tiling, + minutes=opts.wtime, queue=opts.queue, project=opts.code, + wmode=opts.wmode, ncformat=opts.ncformat) diff --git a/tests/yellowstone/testinfo.json b/tests/yellowstone/testinfo.json index e4b602e3..69db4337 100644 --- a/tests/yellowstone/testinfo.json +++ b/tests/yellowstone/testinfo.json @@ -1,8 +1,8 @@ { "pop-1deg":{ "common_name":"POP-1.0", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/pop-1.0", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/pop-1.0", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/pop-1.0", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/pop-1.0", "input_globs":[ "b.e12.B1850C5CN.ne30_g16.init.ch.027.pop.h.000*.nc", "b.e12.B1850C5CN.ne30_g16.init.ch.027.pop.h.0010*.nc" @@ -12,12 +12,14 @@ "metadata":[ "time", "time_bound" - ] + ], + "chunks":{ + } }, "pop-daily-1deg":{ 
"common_name":"POP-DAILY-1.0", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/pop-daily-1.0", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/pop-daily-1.0", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/pop-daily-1.0", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/pop-daily-1.0", "input_globs":[ "b.e12.B1850C5CN.ne30_g16.init.ch.027.pop.h.nday1.0002-*.nc", "b.e12.B1850C5CN.ne30_g16.init.ch.027.pop.h.nday1.0003-*.nc", @@ -34,12 +36,15 @@ "metadata":[ "time", "time_bound" - ] + ], + "chunks":{ + "time": 100 + } }, "pop-0.1deg":{ "common_name":"POP-0.1", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/pop-0.1", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/pop-0.1", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/pop-0.1", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/pop-0.1", "input_globs":[ "v5_rel04_BC5_ne120_t12_pop62.pop.h.*.nc" ], @@ -48,12 +53,16 @@ "metadata":[ "time", "time_bound" - ] + ], + "chunks":{ + "z_t": 21, + "z_w_top": 21 + } }, "cice-1deg":{ "common_name":"CICE-1.0", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/cice-1.0", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/cice-1.0", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/cice-1.0", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/cice-1.0", "input_globs":[ "b.e12.B1850C5CN.ne30_g16.init.ch.027.cice.h.000*.nc", "b.e12.B1850C5CN.ne30_g16.init.ch.027.cice.h.0010*.nc" @@ -63,12 +72,14 @@ "metadata":[ "time", "time_bounds" - ] + ], + "chunks":{ + } }, "cice-0.1deg":{ "common_name":"CICE-0.1", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/cice-0.1", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/cice-0.1", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/cice-0.1", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/cice-0.1", "input_globs":[ "v5_rel04_BC5_ne120_t12_pop62.cice.h.000*.nc", "v5_rel04_BC5_ne120_t12_pop62.cice.h.0010*.nc" @@ -78,12 +89,14 @@ "metadata":[ "time", "time_bounds" - ] + ], + "chunks":{ + } }, "camse-1deg":{ "common_name":"CAMSE-1.0", - 
"input_dir":"/glade/u/tdd/asap/bakeoff/hist/camse-1.0", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/camse-1.0", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/camse-1.0", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/camse-1.0", "input_globs":[ "b.e12.B1850C5CN.ne30_g16.init.ch.027.cam.h0.000*.nc", "b.e12.B1850C5CN.ne30_g16.init.ch.027.cam.h0.0010*.nc" @@ -106,12 +119,14 @@ "time", "time_bnds", "time_written" - ] + ], + "chunks":{ + } }, "camse-0.25deg":{ "common_name":"CAMSE-0.25", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/camse-0.25", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/camse-0.25", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/camse-0.25", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/camse-0.25", "input_globs":[ "v5_rel04_BC5_ne120_t12_pop62.cam.h0.000*.nc", "v5_rel04_BC5_ne120_t12_pop62.cam.h0.0010*.nc" @@ -134,12 +149,14 @@ "time", "time_bnds", "time_written" - ] + ], + "chunks":{ + } }, "camfv-1deg":{ "common_name":"CAMFV-1.0", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/camfv-1.0", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/camfv-1.0", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/camfv-1.0", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/camfv-1.0", "input_globs":[ "b40.20th.track1.1deg.006.cam2.h0.185*.nc" ], @@ -161,12 +178,14 @@ "time", "time_bnds", "time_written" - ] + ], + "chunks":{ + } }, "camhf-monthly-1deg":{ "common_name":"CAMHF-MONTHLY-1.0", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/camhf-monthly-1.0", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/camhf-monthly-1.0", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/camhf-monthly-1.0", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/camhf-monthly-1.0", "input_globs":[ "b.e11.BDP.f09_g16.1958-11.002.cam.h0.19*.nc" ], @@ -211,12 +230,14 @@ "ntrk", "ntrm", "ntrn" - ] + ], + "chunks":{ + } }, "camhf-daily-1deg":{ "common_name":"CAMHF-DAILY-1.0", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/camhf-daily-1.0", - 
"results_dir":"/glade/u/tdd/asap/bakeoff/tseries/camhf-daily-1.0", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/camhf-daily-1.0", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/camhf-daily-1.0", "input_globs":[ "b.e11.BDP.f09_g16.1958-11.002.cam.h1.19*.nc" ], @@ -261,12 +282,15 @@ "ntrk", "ntrm", "ntrn" - ] + ], + "chunks":{ + "time": 100 + } }, "camhf-6hourly-1deg":{ "common_name":"CAMHF-6HOURLY-1.0", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/camhf-6hourly-1.0", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/camhf-6hourly-1.0", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/camhf-6hourly-1.0", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/camhf-6hourly-1.0", "input_globs":[ "b.e11.BDP.f09_g16.1958-11.002.cam.h2.19*.nc" ], @@ -311,12 +335,15 @@ "ntrk", "ntrm", "ntrn" - ] + ], + "chunks":{ + "time": 100 + } }, "clmse-1deg":{ "common_name":"CLM-1.0", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/clmse-1.0", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/clmse-1.0", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/clmse-1.0", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/clmse-1.0", "input_globs":[ "b.e12.B1850C5CN.ne30_g16.init.ch.027.clm2.h0.000*.nc", "b.e12.B1850C5CN.ne30_g16.init.ch.027.clm2.h0.0010*.nc" @@ -333,12 +360,14 @@ "mdcur", "mscur", "nstep" - ] + ], + "chunks":{ + } }, "clmse-0.25deg":{ "common_name":"CLM-0.25", - "input_dir":"/glade/u/tdd/asap/bakeoff/hist/clmse-0.25", - "results_dir":"/glade/u/tdd/asap/bakeoff/tseries/clmse-0.25", + "input_dir":"/glade/p/tdd/asap/bakeoff/hist/clmse-0.25", + "results_dir":"/glade/p/tdd/asap/bakeoff/tseries/clmse-0.25", "input_globs":[ "v5_rel04_BC5_ne120_t12_pop62.clm2.h0.000*.nc", "v5_rel04_BC5_ne120_t12_pop62.clm2.h0.0010*.nc" @@ -355,6 +384,8 @@ "mdcur", "mscur", "nstep" - ] + ], + "chunks":{ + } } -} \ No newline at end of file +} diff --git a/tests/yellowstone/timings-v0.json b/tests/yellowstone/timings-v0.json new file mode 100644 index 00000000..513b3b0d --- /dev/null +++ 
b/tests/yellowstone/timings-v0.json @@ -0,0 +1,5464 @@ +{ + "CAMFV-1.0": { + "baseline": "/glade/u/tdd/asap/bakeoff/tseries/camfv-1.0", + "endYear": "1859", + "input": "/glade/u/tdd/asap/bakeoff/hist/camfv-1.0", + "isize": 28447.744, + "n2dVars": 82, + "n3dVars": 40, + "nVars": 122, + "osize": 28525.4, + "results": { + "cdo": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1940.0, + "sys": "caldera" + } + }, + "ncl": { + "140226-105745": { + "cores": 1, + "correct": "fail", + "kernel": 120.93, + "metadata": false, + "nodes": 1, + "real": 1767.76, + "sys": "geyser", + "user": 153.4 + } + }, + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1768.0, + "sys": "geyser" + } + }, + "ncr": { + "140226-173803": { + "cores": 4, + "correct": "fail", + "metadata": true, + "nodes": 1, + "real": 3353.0, + "sys": "caldera" + }, + "140303-175041": { + "cores": 4, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 3251.0, + "sys": "caldera" + }, + "140312-182909": { + "TS": 732.7, + "cores": 16, + "correct": "pass", + "metaTI": 1.27, + "metaTV": 164.81, + "metadata": true, + "nodes": 4, + "real": 936.0, + "sys": "caldera" + } + }, + "pagoda": { + "??????-??????": { + "cores": 4, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1236.0, + "sys": "caldera" + } + }, + "pynio": { + "140305-193740": { + "cores": 1, + "correct": "fail", + "kernel": 32.6, + "metadata": false, + "nodes": 1, + "real": 1523.02, + "sys": "geyser", + "user": 112.72 + }, + "140307-113151": { + "TS": 1463.81, + "actual": 84000.0, + "cores": 1, + "correct": false, + "kernel": 31.82, + "metaTI": 6.29, + "metaTV": 50.86, + "metadata": false, + "nodes": 1, + "openi": 6.65, + "openo": 0.78, + "real": 1542.99, + "request": 28447.83, + "sys": "geyser", + "user": 114.6 + }, + "140307-120707": { + "TS": 1824.31, + "actual": 77760.0, + "cores": 1, + "correct": false, + "kernel": 
46.61, + "metaTI": 7.74, + "metaTV": 82.0, + "metadata": true, + "nodes": 1, + "openi": 6.19, + "openo": 0.44, + "real": 1933.83, + "request": 28447.83, + "sys": "geyser", + "user": 141.55 + } + }, + "pynio4_0": { + "140317-232104": { + "TS": 1900.95, + "actual": 956160.0, + "cores": 1, + "correct": false, + "kernel": 64.18, + "metaTI": 2.7, + "metaTV": 161.9, + "metadata": true, + "nodes": 1, + "openi": 6.46, + "openo": 0.26, + "real": 2077.05, + "request": 28447.83, + "sys": "geyser", + "user": 498.48 + } + }, + "pynio4_1": { + "140318-114642": { + "TS": 2706.45, + "actual": 956160.0, + "cores": 1, + "correct": false, + "kernel": 62.85, + "metaTI": 3.59, + "metaTV": 291.47, + "metadata": true, + "nodes": 1, + "openi": 7.08, + "openo": 0.91, + "real": 3020.34, + "request": 28447.83, + "sys": "geyser", + "user": 1174.47 + } + }, + "pyniompi": { + "140305-204127": { + "cores": 16, + "correct": "fail", + "kernel": 0.26, + "metadata": false, + "nodes": 4, + "real": 243.41, + "sys": "yellowstone", + "user": 0.22 + }, + "140310-164122": { + "TS": 197.8, + "actual": 77760.0, + "cores": 16, + "correct": false, + "kernel": 0.09, + "metaTI": 0.49, + "metaTV": 5.04, + "metadata": true, + "nodes": 4, + "openi": 5.71, + "openo": 0.14, + "real": 211.66, + "request": 28447.83, + "sys": "yellowstone", + "user": 0.18 + }, + "140313-154918": { + "TS": 248.07, + "actual": 77760.0, + "cores": 16, + "correct": false, + "kernel": 0.09, + "metaTI": 0.23, + "metaTV": 8.91, + "metadata": true, + "nodes": 4, + "openi": 5.66, + "openo": 0.14, + "real": 268.67, + "request": 28447.83, + "sys": "yellowstone", + "user": 0.18 + } + }, + "pyniompi4_0": { + "140317-234936": { + "TS": 182.06, + "actual": 956160.0, + "cores": 16, + "correct": false, + "kernel": 0.09, + "metaTI": 0.19, + "metaTV": 10.64, + "metadata": true, + "nodes": 4, + "openi": 5.84, + "openo": 0.27, + "real": 207.49, + "request": 28447.83, + "sys": "yellowstone", + "user": 0.18 + } + }, + "pyniompi4_1": { + "140318-092050": { + 
"TS": 337.4, + "actual": 956160.0, + "cores": 16, + "correct": false, + "kernel": 0.14, + "metaTI": 0.22, + "metaTV": 12.79, + "metadata": true, + "nodes": 4, + "openi": 5.77, + "openo": 0.22, + "real": 362.0, + "request": 28447.83, + "sys": "yellowstone", + "user": 0.16 + } + }, + "pyreshaper": { + "140624-113717": { + "TS": 469.639892101, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.462065219879, + "metaTV": 34.4386823177, + "metadata": true, + "nodes": 4, + "openi": 7.20728111267, + "openo": 0.362703323364, + "real": 499.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140826-145444": { + "TS": 244.512058496, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.462116003036, + "metaTV": 16.0183949471, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 4.00277686119, + "openo": 0.437393665314, + "real": 297.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140902-142335": { + "TS": 258.899525881, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.371918916702, + "metaTV": 23.6432528496, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 3.8312189579, + "openo": 0.474730968475, + "real": 309.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140911-110120": { + "TS": 169.407341719, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.230446100235, + "metaTV": 7.24861335754, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 4.6857380867, + "openo": 0.27854514122, + "real": 221.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140911-112751": { + "TS": 189.411576748, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.28161406517, + "metaTV": 11.0464272499, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 4.41655302048, + "openo": 0.388634204865, + "real": 243.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140915-153852": { + "TS": 139.93287158, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.356719017029, + "metaTV": 7.22233319283, + "metadata": true, + 
"nodes": 4, + "once": false, + "openi": 3.86684203148, + "openo": 0.50390124321, + "real": 192.0, + "request": 28449.3301392, + "sys": "yellowstone" + } + }, + "pyreshaper4": { + "140624-113934": { + "TS": 481.015773535, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.442744970322, + "metaTV": 34.7662909031, + "metadata": true, + "nodes": 4, + "openi": 10.6994099617, + "openo": 0.54573059082, + "real": 557.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140826-145413": { + "TS": 240.958698273, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.340924263, + "metaTV": 18.6606128216, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 4.05443310738, + "openo": 0.380392551422, + "real": 266.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140902-134715": { + "TS": 185.440463305, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.230117082596, + "metaTV": 15.0816841125, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 4.1193420887, + "openo": 0.602751970291, + "real": 242.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140911-114028": { + "TS": 357.623367548, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.470070123672, + "metaTV": 30.8412137032, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 7.0916261673, + "openo": 0.382592201233, + "real": 436.0, + "request": 28449.3301392, + "sys": "yellowstone" + } + }, + "pyreshaper4c": { + "140624-113836": { + "TS": 502.167779446, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.599811792374, + "metaTV": 34.622944355, + "metadata": true, + "nodes": 4, + "openi": 6.6991379261, + "openo": 0.377572059631, + "real": 567.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140826-145444": { + "TS": 239.348858118, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.299512863159, + "metaTV": 15.5699160099, + "metadata": true, + "nodes": 4, + "openi": 4.13505911827, + "openo": 0.544854164124, + "real": 297.0, + "request": 28449.3301392, + 
"sys": "yellowstone" + }, + "140902-110236": { + "TS": 279.305563927, + "actual": 84960.0, + "cores": 16, + "metaTI": 0.154568433762, + "metaTV": 4.60655140877, + "metadata": true, + "nodes": 4, + "once": true, + "openi": 5.66544604301, + "openo": 0.297202825546, + "real": 327.0, + "request": 28447.8346252, + "sys": "yellowstone" + }, + "140902-133350": { + "TS": 245.287409306, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.318746089935, + "metaTV": 18.6890969276, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 6.27275109291, + "openo": 0.33965086937, + "real": 309.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140902-210623": { + "TS": 300.253802061, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.677854299545, + "metaTV": 23.3515269756, + "metadata": true, + "nodes": 1, + "once": false, + "openi": 4.75587201118, + "openo": 0.537309408188, + "real": 332.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "140911-114232": { + "TS": 401.958108187, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.509034633636, + "metaTV": 27.5770299435, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.226307869, + "openo": 0.364979028702, + "real": 467.0, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "150918-140320": { + "TS": 310.094833136, + "actual": 956160.0, + "cores": 16, + "metaTI": 0.48850774765, + "metaTV": 36.2081618309, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 4.52633786201, + "openo": 0.211630821228, + "real": 329.733877182, + "request": 28449.3301392, + "sys": "yellowstone" + }, + "151002-091959": { + "TS": 60.2626390457, + "actual": 968848.0, + "cores": 16, + "metaTI": 0.261739492416, + "metaTV": 10.9838540554, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 5.89429616928, + "openo": 0.257526159286, + "real": 190.701400042, + "request": 28450.9692535, + "sys": "yellowstone" + }, + "151002-120211": { + "TSr": 267.074586868, + "TSw": 60.4137539864, + "actual": 
968848.0, + "cores": 16, + "metaTIr": 0.212244749069, + "metaTIw": 0.262345075607, + "metaTVr": 13.3320331573, + "metaTVw": 11.3042991161, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 4.30114293098, + "openo": 0.335148334503, + "real": 324.800256014, + "request": 28450.9692535, + "sys": "yellowstone" + }, + "151002-144720": { + "TSr": 267.789553165, + "TSw": 62.3434591293, + "actual": 968848.0, + "cores": 16, + "metaTIr": 0.148299455643, + "metaTIw": 0.26377415657, + "metaTVr": 15.7539906502, + "metaTVw": 10.9335258007, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 4.0840959549, + "openo": 0.172574281693, + "real": 323.57800293, + "request": 28450.9692535, + "sys": "yellowstone" + }, + "151005-133333": { + "TSr": 124.339257717, + "TSw": 61.037047863, + "actual": 968848.0, + "cores": 16, + "metaTIr": 0.0115587711334, + "metaTIw": 0.292886972427, + "metaTVr": 1.04787969589, + "metaTVw": 11.0901486874, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 20.3761889935, + "openo": 0.308133363724, + "real": 205.250231981, + "request": 28450.9692535, + "sys": "yellowstone" + }, + "151005-141124": { + "TSr": 251.438382626, + "TSw": 60.7321989536, + "actual": 968848.0, + "cores": 16, + "metaTIr": 0.0118687152863, + "metaTIw": 0.33265209198, + "metaTVr": 1.06335258484, + "metaTVw": 10.987541914, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 31.2857921124, + "openo": 0.316594362259, + "real": 336.083908081, + "request": 28450.9692535, + "sys": "yellowstone" + } + } + }, + "startYear": "1850" + }, + "CAMHF-6HOURLY-1.0": { + "results": { + "pyreshaper4c": { + "151002-105208": { + "TS": 3164.16029859, + "actual": 16880504.0, + "cores": 16, + "metaTI": 0.070193529129, + "metaTV": 302.568546534, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 224.951442003, + "openo": 0.540024995804, + "real": 4717.14768195, + "request": 412733.208378, + "sys": "yellowstone" + }, + "151002-132743": { + "TSr": 2102.44257331, + 
"TSw": 3221.23370409, + "actual": 16880504.0, + "cores": 16, + "metaTIr": 1.16329169273, + "metaTIw": 0.0410597324371, + "metaTVr": 578.328157187, + "metaTVw": 324.140898943, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 319.565881968, + "openo": 0.560957193375, + "real": 5139.13690186, + "request": 412733.208378, + "sys": "yellowstone" + }, + "151005-153203": { + "TSr": 1839.39458537, + "TSw": 3156.90317488, + "actual": 16880504.0, + "cores": 16, + "metaTIr": 1.22540211678, + "metaTIw": 0.101293802261, + "metaTVr": 568.625186205, + "metaTVw": 333.683583498, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 350.77940321, + "openo": 0.798895120621, + "real": 4755.39894795, + "request": 412733.208378, + "sys": "yellowstone" + } + } + } + }, + "CAMHF-DAILY-1.0": { + "results": { + "pyreshaper4c": { + "151002-094928": { + "TS": 106.482156515, + "actual": 11395904.0, + "cores": 16, + "metaTI": 0.0740783214569, + "metaTV": 146.638347387, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 45.2218770981, + "openo": 0.418075799942, + "real": 1020.13037086, + "request": 39295.5901566, + "sys": "yellowstone" + }, + "151002-121835": { + "TSr": 718.967712879, + "TSw": 109.465676069, + "actual": 11395904.0, + "cores": 16, + "metaTIr": 3.48129415512, + "metaTIw": 0.316914319992, + "metaTVr": 327.981246948, + "metaTVw": 163.808205605, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 51.1060910225, + "openo": 0.799788951874, + "real": 1241.54569411, + "request": 39295.5901566, + "sys": "yellowstone" + }, + "151002-145049": { + "TSr": 240.105856419, + "TSw": 94.4298458099, + "actual": 77864.0, + "cores": 16, + "metaTIr": 3.07635235786, + "metaTIw": 0.10428237915, + "metaTVr": 244.117375135, + "metaTVw": 7.25943517685, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 37.4065859318, + "openo": 0.587407112122, + "real": 469.319635868, + "request": 39295.5901566, + "sys": "yellowstone" + }, + "151005-134305": { + "TSr": 
258.39025259, + "TSw": 91.9618022442, + "actual": 77864.0, + "cores": 16, + "metaTIr": 2.48077702522, + "metaTIw": 0.102338075638, + "metaTVr": 67.0126914978, + "metaTVw": 6.04324865341, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 136.388954163, + "openo": 0.433568954468, + "real": 479.639178038, + "request": 39295.5901566, + "sys": "yellowstone" + }, + "151005-142821": { + "TSr": 725.420089722, + "TSw": 107.214648724, + "actual": 11395904.0, + "cores": 16, + "metaTIr": 3.11352205276, + "metaTIw": 0.195004463196, + "metaTVr": 163.349392653, + "metaTVw": 165.829848766, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 148.906975985, + "openo": 1.03988480568, + "real": 1210.8861711, + "request": 39295.5901566, + "sys": "yellowstone" + } + } + } + }, + "CAMHF-MONTHLY-1.0": { + "results": { + "pyreshaper4c": { + "151002-093524": { + "TS": 84.0484206676, + "actual": 1023964.0, + "cores": 16, + "metaTI": 0.197278022766, + "metaTV": 8.23391604424, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 19.332545042, + "openo": 0.158255577087, + "real": 325.792993069, + "request": 41617.1186371, + "sys": "yellowstone" + }, + "151002-120324": { + "TSr": 303.185502052, + "TSw": 84.3467159271, + "actual": 1023964.0, + "cores": 16, + "metaTIr": 2.37086200714, + "metaTIw": 0.221300363541, + "metaTVr": 10.6845979691, + "metaTVw": 8.21553111076, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 24.5326368809, + "openo": 0.497053861618, + "real": 375.913740158, + "request": 41617.1186371, + "sys": "yellowstone" + }, + "151002-144915": { + "TSr": 300.091548204, + "TSw": 86.8913040161, + "actual": 1023964.0, + "cores": 16, + "metaTIr": 3.63008785248, + "metaTIw": 0.239748716354, + "metaTVr": 10.3928160667, + "metaTVw": 9.76353907585, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 26.914000988, + "openo": 0.475345373154, + "real": 384.981215954, + "request": 41617.1186371, + "sys": "yellowstone" + }, + "151005-133628": { 
+ "TSr": 136.590059996, + "TSw": 84.718130827, + "actual": 1023964.0, + "cores": 16, + "metaTIr": 1.79091453552, + "metaTIw": 0.195056438446, + "metaTVr": 1.93241810799, + "metaTVw": 8.07740068436, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 75.4882974625, + "openo": 0.338253259659, + "real": 272.33850503, + "request": 41617.1186371, + "sys": "yellowstone" + }, + "151005-141354": { + "TSr": 239.311976194, + "TSw": 84.3950455189, + "actual": 1023964.0, + "cores": 16, + "metaTIr": 3.44647073746, + "metaTIw": 0.237613201141, + "metaTVr": 2.22094035149, + "metaTVw": 8.04119968414, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 118.201842785, + "openo": 0.282948732376, + "real": 418.47419095, + "request": 41617.1186371, + "sys": "yellowstone" + } + } + } + }, + "CAMSE-0.25": { + "baseline": "/glade/u/tdd/asap/bakeoff/tseries/camse-0.25", + "endYear": "0010", + "input": "/glade/u/tdd/asap/bakeoff/hist/camse-0.25", + "isize": 1074790.4, + "n2dVars": 101, + "n3dVars": 97, + "nVars": 198, + "osize": 1078586.3, + "results": { + "cdo": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 35221.0, + "sys": "geyser" + } + }, + "ncl": { + "140228-145958": { + "cores": 1, + "kernel": 1679.35, + "metadata": false, + "nodes": 1, + "real": 25973.02, + "sys": "geyser", + "user": 4582.95 + } + }, + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 17136.0, + "sys": "geyser" + } + }, + "ncr": { + "140303-130853": { + "cores": 16, + "correct": "pass", + "metadata": true, + "nodes": 4, + "real": 11597.0, + "sys": "caldera" + }, + "140312-193053": { + "TS": 3243.49, + "cores": 16, + "correct": "pass", + "metaTI": 21.05, + "metaTV": 468.2, + "metadata": true, + "nodes": 4, + "real": 3693.0, + "sys": "caldera" + } + }, + "pagoda": { + "??????-??????": { + "cores": 16, + "correct": "pass", + "metadata": true, + "nodes": 4, + "real": 20403.0, + "sys": 
"caldera" + } + }, + "pynio": { + "140306-094908": { + "cores": 1, + "correct": "fail", + "kernel": 1203.54, + "metadata": false, + "nodes": 1, + "real": 8277.37, + "sys": "geyser", + "user": 3875.32 + }, + "140313-160743": { + "TS": 11855.1, + "actual": 2544960.0, + "cores": 1, + "correct": "fail", + "kernel": 2012.42, + "metaTI": 48.72, + "metaTV": 474.69, + "metadata": true, + "nodes": 1, + "openi": 9.67, + "openo": 0.87, + "real": 12393.05, + "request": 1074992.99, + "sys": "geyser", + "user": 5717.35 + } + }, + "pyniompi": { + "140305-204455": { + "cores": 16, + "correct": "fail", + "kernel": 0.13, + "metadata": false, + "nodes": 4, + "real": 830.18, + "sys": "yellowstone", + "user": 0.23 + }, + "140313-160528": { + "TS": 1161.84, + "actual": 1119360.0, + "cores": 16, + "correct": "fail", + "kernel": 0.14, + "metaTI": 2.97, + "metaTV": 21.81, + "metadata": true, + "nodes": 4, + "openi": 11.31, + "openo": 0.09, + "real": 1204.44, + "request": 1074992.99, + "sys": "yellowstone", + "user": 0.24 + } + }, + "pyniompi4_0": { + "140318-070023": { + "TS": 1637.77, + "actual": 2544960.0, + "cores": 16, + "correct": "fail", + "kernel": 0.14, + "metaTI": 3.62, + "metaTV": 26.27, + "metadata": true, + "nodes": 4, + "openi": 10.9, + "openo": 0.3, + "real": 1688.02, + "request": 1074992.99, + "sys": "yellowstone", + "user": 0.24 + } + }, + "pyreshaper": { + "140624-115354": { + "TS": 1400.56879449, + "actual": 2544960.0, + "cores": 16, + "metaTI": 2.51281142235, + "metaTV": 28.7670600414, + "metadata": true, + "nodes": 4, + "openi": 11.0115630627, + "openo": 0.804432153702, + "real": 1485.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140826-185109": { + "TS": 1249.34267521, + "actual": 2544960.0, + "cores": 16, + "metaTI": 2.66328287125, + "metaTV": 23.6536338329, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.302508831, + "openo": 1.21547722816, + "real": 1328.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140902-144045": { 
+ "TS": 1260.70107675, + "actual": 2544960.0, + "cores": 16, + "metaTI": 2.78524708748, + "metaTV": 28.7220621109, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.0819709301, + "openo": 0.501886844635, + "real": 1339.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140911-114308": { + "TS": 1073.58587337, + "actual": 2544960.0, + "cores": 16, + "metaTI": 2.69405317307, + "metaTV": 28.7266523838, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.8840389252, + "openo": 0.400128126144, + "real": 1160.0, + "request": 1074995.44647, + "sys": "yellowstone" + } + }, + "pyreshaper4": { + "140624-120120": { + "TS": 1781.45158434, + "actual": 2544960.0, + "cores": 16, + "metaTI": 3.85545730591, + "metaTV": 25.5409047604, + "metadata": true, + "nodes": 4, + "openi": 8.35853886604, + "openo": 0.244127988815, + "real": 1863.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140827-151200": { + "TS": 1299.53353667, + "actual": 2544960.0, + "cores": 16, + "metaTI": 4.34471225739, + "metaTV": 20.2178757191, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.7356190681, + "openo": 0.502675294876, + "real": 1378.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140902-140613": { + "TS": 1306.83785939, + "actual": 2544960.0, + "cores": 16, + "metaTI": 3.65520238876, + "metaTV": 19.6583509445, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.57616996765, + "openo": 0.55352139473, + "real": 1380.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140911-115857": { + "TS": 1406.00577044, + "actual": 2544960.0, + "cores": 16, + "metaTI": 4.1827378273, + "metaTV": 24.5323703289, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 7.99878907204, + "openo": 0.498881101608, + "real": 1486.0, + "request": 1074995.44647, + "sys": "yellowstone" + } + }, + "pyreshaper4c": { + "140624-154149": { + "TS": 3583.59778595, + "actual": 2544960.0, + "cores": 16, + "metaTI": 8.08878302574, 
+ "metaTV": 30.8035469055, + "metadata": true, + "nodes": 4, + "openi": 10.7068860531, + "openo": 0.415813922882, + "real": 3675.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140827-153424": { + "TS": 2636.91293931, + "actual": 2544960.0, + "cores": 16, + "metaTI": 7.7899210453, + "metaTV": 25.9457168579, + "metadata": true, + "nodes": 4, + "openi": 10.7509250641, + "openo": 0.241142034531, + "real": 2722.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140902-114116": { + "TS": 2593.2846427, + "actual": 1126560.0, + "cores": 16, + "metaTI": 0.603957891464, + "metaTV": 1.47066664696, + "metadata": true, + "nodes": 4, + "once": true, + "openi": 10.3560228348, + "openo": 0.474064826965, + "real": 2647.0, + "request": 1074993.01163, + "sys": "yellowstone" + }, + "140902-141416": { + "TS": 2652.37349391, + "actual": 2544960.0, + "cores": 16, + "metaTI": 8.0114004612, + "metaTV": 22.7672245502, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.4009640217, + "openo": 0.888756275177, + "real": 2735.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140902-171344": { + "TS": 2968.51429629, + "actual": 2544960.0, + "cores": 16, + "metaTI": 8.98694396019, + "metaTV": 33.0424771309, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.9293580055, + "openo": 0.932354211807, + "real": 3033.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140902-215120": { + "TS": 2962.92944121, + "actual": 2544960.0, + "cores": 16, + "metaTI": 8.98501515388, + "metaTV": 29.6636142731, + "metadata": true, + "nodes": 1, + "once": false, + "openi": 9.88092207909, + "openo": 1.34773635864, + "real": 3029.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140903-072528": { + "TS": 1525.11489868, + "actual": 2544960.0, + "cores": 32, + "metaTI": 4.27042007446, + "metaTV": 14.0503649712, + "metadata": true, + "nodes": 2, + "once": false, + "openi": 9.37944698334, + "openo": 1.15070509911, + "real": 1563.0, + 
"request": 1074995.44647, + "sys": "yellowstone" + }, + "140903-095922": { + "TS": 561.568521738, + "actual": 2544960.0, + "cores": 128, + "metaTI": 1.82556605339, + "metaTV": 3.6412024498, + "metadata": true, + "nodes": 8, + "once": false, + "openi": 8.11461997032, + "openo": 1.0275592804, + "real": 619.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140903-100240": { + "TS": 994.121627808, + "actual": 2544960.0, + "cores": 64, + "metaTI": 2.71437478065, + "metaTV": 7.76852416992, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.9681520462, + "openo": 1.23905086517, + "real": 1057.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140903-100329": { + "TS": 535.087977648, + "actual": 2544960.0, + "cores": 192, + "metaTI": 1.12524223328, + "metaTV": 3.41003489494, + "metadata": true, + "nodes": 12, + "once": false, + "openi": 7.29357194901, + "openo": 0.894973993301, + "real": 589.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140903-100330": { + "TS": 534.806496859, + "actual": 2544960.0, + "cores": 160, + "metaTI": 1.2835021019, + "metaTV": 3.78852272034, + "metadata": true, + "nodes": 10, + "once": false, + "openi": 7.28589892387, + "openo": 1.07611322403, + "real": 590.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140903-100424": { + "TS": 819.987736464, + "actual": 2544960.0, + "cores": 96, + "metaTI": 2.11248207092, + "metaTV": 6.56551790237, + "metadata": true, + "nodes": 6, + "once": false, + "openi": 8.11874198914, + "openo": 0.758881092072, + "real": 876.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140903-105324": { + "TS": 517.756282568, + "actual": 2544960.0, + "cores": 224, + "metaTI": 1.02261710167, + "metaTV": 2.5079562664, + "metadata": true, + "nodes": 14, + "once": false, + "openi": 8.84300518036, + "openo": 0.71942615509, + "real": 572.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "140911-122056": { + "TS": 2685.49674749, + "actual": 2544960.0, + 
"cores": 16, + "metaTI": 8.03190565109, + "metaTV": 26.1082623005, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.18156194687, + "openo": 0.686249017715, + "real": 2771.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "150123-111155": { + "TS": 2621.06582308, + "actual": 2544960.0, + "cores": 16, + "metaTI": 7.96389293671, + "metaTV": 38.0018165112, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.776350975, + "openo": 0.612726926804, + "real": 2710.0, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "150918-144422": { + "TS": 2765.84281325, + "actual": 2544960.0, + "cores": 16, + "metaTI": 8.49992275238, + "metaTV": 47.4285974503, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.22591900826, + "openo": 0.864688158035, + "real": 2787.62379503, + "request": 1074995.44647, + "sys": "yellowstone" + }, + "151002-100638": { + "TS": 1835.60106206, + "actual": 2561592.0, + "cores": 16, + "metaTI": 6.68212819099, + "metaTV": 10.6226089001, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.9332418442, + "openo": 0.649513483047, + "real": 2207.67511892, + "request": 1078519.71414, + "sys": "yellowstone" + }, + "151002-123613": { + "TSr": 520.182231903, + "TSw": 1827.02842641, + "actual": 2561592.0, + "cores": 16, + "metaTIr": 2.53915166855, + "metaTIw": 6.77962827682, + "metaTVr": 40.9809556007, + "metaTVw": 12.277674675, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.0326859951, + "openo": 0.439650297165, + "real": 2362.43693519, + "request": 1078519.71414, + "sys": "yellowstone" + }, + "151002-152949": { + "TSr": 1009.50953722, + "TSw": 1854.70439887, + "actual": 2561592.0, + "cores": 16, + "metaTIr": 1.82785892487, + "metaTIw": 8.34366703033, + "metaTVr": 54.4348504543, + "metaTVw": 17.7780208588, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 13.8072900772, + "openo": 0.825301170349, + "real": 2840.41509485, + "request": 1078519.71414, + "sys": 
"yellowstone" + }, + "151005-144227": { + "TSr": 273.461855412, + "TSw": 1828.38464713, + "actual": 2561592.0, + "cores": 16, + "metaTIr": 0.79497218132, + "metaTIw": 6.30753397942, + "metaTVr": 1.44262719154, + "metaTVw": 10.0376775265, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 86.030144453, + "openo": 0.728790521622, + "real": 2194.46634698, + "request": 1078519.71414, + "sys": "yellowstone" + } + } + }, + "startYear": "0001" + }, + "CAMSE-1.0": { + "baseline": "/glade/u/tdd/asap/bakeoff/tseries/camse-1.0", + "endYear": "0010", + "input": "/glade/u/tdd/asap/bakeoff/hist/camse-1.0", + "isize": 30681.088, + "n2dVars": 89, + "n3dVars": 43, + "nVars": 132, + "osize": 30848.5, + "results": { + "cdo": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 2045.0, + "sys": "caldera" + } + }, + "ncl": { + "140226-105740": { + "cores": 1, + "correct": "fail", + "kernel": 106.11, + "metadata": false, + "nodes": 1, + "real": 1744.31, + "sys": "geyser", + "user": 156.66 + }, + "140310-150352": { + "cores": 1, + "correct": false, + "kernel": 64.39, + "metadata": true, + "nodes": 1, + "real": 1415.25, + "sys": "geyser", + "user": 139.09 + } + }, + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1675.0, + "sys": "geyser" + } + }, + "ncr": { + "140226-174338": { + "cores": 4, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 3508.0, + "sys": "caldera" + }, + "140312-174430": { + "TS": 584.01, + "cores": 16, + "correct": "pass", + "metaTI": 1.65, + "metaTV": 144.09, + "metadata": true, + "nodes": 4, + "real": 776.0, + "sys": "caldera" + } + }, + "pagoda": { + "??????-??????": { + "cores": 4, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1221.0, + "sys": "caldera" + } + }, + "pynio": { + "140306-122536": { + "cores": 1, + "correct": "fail", + "kernel": 47.7, + "metadata": false, + "nodes": 1, + "real": 1186.08, + "sys": 
"geyser", + "user": 142.21 + }, + "140313-140230": { + "TS": 2018.78, + "actual": 1034400.0, + "cores": 1, + "correct": "fail", + "kernel": 64.35, + "metaTI": 15.56, + "metaTV": 205.56, + "metadata": true, + "nodes": 1, + "openi": 10.53, + "openo": 0.8, + "real": 2252.34, + "request": 30680.31, + "sys": "geyser", + "user": 207.35 + }, + "140314-095342": { + "TS": 1823.92, + "actual": 1034400.0, + "cores": 1, + "correct": "fail", + "kernel": 64.35, + "metaTI": 15.81, + "metaTV": 187.139, + "metadata": true, + "nodes": 1, + "openi": 9.58, + "openo": 1.64, + "real": 2046.33, + "request": 30680.31, + "sys": "geyser", + "user": 209.87 + } + }, + "pynio4_0": { + "140317-211646": { + "TS": 2030.66, + "actual": 1034400.0, + "cores": 1, + "correct": "fail", + "kernel": 71.43, + "metaTI": 5.59, + "metaTV": 154.64, + "metadata": true, + "nodes": 1, + "openi": 7.5, + "openo": 0.72, + "real": 2207.53, + "request": 30680.31, + "sys": "geyser", + "user": 554.99 + } + }, + "pynio4_1": { + "140317-220052": { + "TS": 2487.96, + "actual": 1034400.0, + "cores": 1, + "correct": "fail", + "kernel": 65.25, + "metaTI": 5.93, + "metaTV": 143.5, + "metadata": true, + "nodes": 1, + "openi": 8.76, + "openo": 0.31, + "real": 2655.9, + "request": 30680.31, + "sys": "geyser", + "user": 1237.65 + } + }, + "pyniompi": { + "140305-202628": { + "cores": 16, + "correct": "fail", + "kernel": 0.26, + "metadata": false, + "nodes": 4, + "real": 241.44, + "sys": "yellowstone", + "user": 0.22 + }, + "140307-12401": { + "cores": 16, + "correct": "fail", + "kernel": 0.1, + "metadata": true, + "nodes": 4, + "real": 394.67, + "sys": "yellowstone", + "user": 0.19 + }, + "140313-131051": { + "TS": 326.62, + "actual": 84000.0, + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metaTI": 0.07, + "metaTV": 7.41, + "metadata": true, + "nodes": 4, + "openi": 7.87, + "openo": 0.08, + "real": 351.16, + "request": 30680.31, + "sys": "yellowstone", + "user": 0.18 + }, + "140313-152431": { + "TS": 274.12, + "actual": 
84000.0, + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metaTI": 0.85, + "metaTV": 7.67, + "metadata": true, + "nodes": 4, + "openi": 7.87, + "openo": 0.12, + "real": 298.27, + "request": 30680.31, + "sys": "yellowstone", + "user": 0.18 + }, + "140314-095707": { + "TS": 274.95, + "actual": 84000.0, + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metaTI": 0.66, + "metaTV": 3.82, + "metadata": true, + "nodes": 4, + "openi": 6.57, + "openo": 0.09, + "real": 295.59, + "request": 30680.31, + "sys": "yellowstone", + "user": 0.18 + } + }, + "pyniompi4_0": { + "140317-220307": { + "TS": 296.53, + "actual": 1034400.0, + "cores": 16, + "correct": "fail", + "kernel": 0.09, + "metaTI": 0.33, + "metaTV": 8.92, + "metadata": true, + "nodes": 4, + "openi": 6.24, + "openo": 0.14, + "real": 320.67, + "request": 30680.31, + "sys": "yellowstone", + "user": 0.19 + } + }, + "pyniompi4_1": { + "140317-225919": { + "TS": 360.21, + "actual": 1034400.0, + "cores": 16, + "correct": "fail", + "kernel": 0.09, + "metaTI": 0.39, + "metaTV": 15.98, + "metadata": true, + "nodes": 4, + "openi": 6.24, + "openo": 0.14, + "real": 385.35, + "request": 30680.31, + "sys": "yellowstone", + "user": 0.19 + } + }, + "pyreshaper": { + "140624-113737": { + "TS": 461.879812002, + "actual": 1034400.0, + "cores": 16, + "metaTI": 2.49611783028, + "metaTV": 31.9543848038, + "metadata": true, + "nodes": 4, + "openi": 7.89434218407, + "openo": 0.142995357513, + "real": 546.0, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "140826-145447": { + "TS": 241.934791088, + "actual": 1034400.0, + "cores": 16, + "metaTI": 1.35345077515, + "metaTV": 16.4527909756, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.71761202812, + "openo": 0.303935050964, + "real": 300.0, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "140902-142338": { + "TS": 248.70229125, + "actual": 1034400.0, + "cores": 16, + "metaTI": 1.40357112885, + "metaTV": 28.3160479069, + "metadata": true, + 
"nodes": 4, + "once": false, + "openi": 9.03130292892, + "openo": 0.431185722351, + "real": 312.0, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "140911-112745": { + "TS": 179.752914667, + "actual": 1034400.0, + "cores": 16, + "metaTI": 1.11748170853, + "metaTV": 11.5109965801, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.6885778904, + "openo": 0.423074483871, + "real": 237.0, + "request": 30681.9406128, + "sys": "yellowstone" + } + }, + "pyreshaper4": { + "140624-113928": { + "TS": 543.640252829, + "actual": 1034400.0, + "cores": 16, + "metaTI": 2.19070911407, + "metaTV": 35.477850914, + "metadata": true, + "nodes": 4, + "openi": 11.187032938, + "openo": 0.304791927338, + "real": 599.0, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "140826-145447": { + "TS": 242.617993832, + "actual": 1034400.0, + "cores": 16, + "metaTI": 0.595024824142, + "metaTV": 15.4887797832, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.71837592125, + "openo": 0.375550270081, + "real": 300.0, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "140902-134722": { + "TS": 191.421459198, + "actual": 1034400.0, + "cores": 16, + "metaTI": 0.926884889603, + "metaTV": 14.8415930271, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.79110503197, + "openo": 0.299824237823, + "real": 249.0, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "140911-114148": { + "TS": 389.183979034, + "actual": 1034400.0, + "cores": 16, + "metaTI": 1.48791742325, + "metaTV": 30.8276884556, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 14.9462389946, + "openo": 0.562728881836, + "real": 457.0, + "request": 30681.9406128, + "sys": "yellowstone" + } + }, + "pyreshaper4c": { + "140624-114030": { + "TS": 529.075484514, + "actual": 1034400.0, + "cores": 16, + "metaTI": 2.13327646255, + "metaTV": 26.9639163017, + "metadata": true, + "nodes": 4, + "openi": 15.6393549442, + "openo": 0.988724708557, + "real": 613.0, + 
"request": 30681.9406128, + "sys": "yellowstone" + }, + "140826-145447": { + "TS": 243.839389324, + "actual": 1034400.0, + "cores": 16, + "metaTI": 0.800844907761, + "metaTV": 12.4930071831, + "metadata": true, + "nodes": 4, + "openi": 8.7155380249, + "openo": 0.751301765442, + "real": 300.0, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "140902-110246": { + "TS": 280.340123892, + "actual": 91200.0, + "cores": 16, + "metaTI": 0.464305877686, + "metaTV": 4.72494339943, + "metadata": true, + "nodes": 4, + "once": true, + "openi": 8.61912894249, + "openo": 0.507271051407, + "real": 337.0, + "request": 30680.3215027, + "sys": "yellowstone" + }, + "140902-133355": { + "TS": 252.199251652, + "actual": 1034400.0, + "cores": 16, + "metaTI": 0.640518188477, + "metaTV": 11.9670262337, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.66032004356, + "openo": 0.285562992096, + "real": 309.0, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "140902-210623": { + "TS": 287.249596357, + "actual": 1034400.0, + "cores": 16, + "metaTI": 0.681195020676, + "metaTV": 31.8642385006, + "metadata": true, + "nodes": 1, + "once": false, + "openi": 19.1077411175, + "openo": 1.23596668243, + "real": 332.0, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "140911-114234": { + "TS": 396.764893293, + "actual": 1034400.0, + "cores": 16, + "metaTI": 1.6780602932, + "metaTV": 23.1965000629, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 15.0613510609, + "openo": 0.554198741913, + "real": 469.0, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "150918-140346": { + "TS": 345.118621349, + "actual": 1034400.0, + "cores": 16, + "metaTI": 1.34499812126, + "metaTV": 47.3280603886, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 3.53514695168, + "openo": 0.332359075546, + "real": 377.197709084, + "request": 30681.9406128, + "sys": "yellowstone" + }, + "151002-091617": { + "TS": 65.0861947536, + "actual": 1043904.0, + "cores": 
16, + "metaTI": 0.534062385559, + "metaTV": 15.8530282974, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 7.89265394211, + "openo": 0.417589902878, + "real": 198.518553019, + "request": 30828.9682617, + "sys": "yellowstone" + }, + "151002-120250": { + "TSr": 292.098123789, + "TSw": 64.9243013859, + "actual": 1043904.0, + "cores": 16, + "metaTIr": 0.472710609436, + "metaTIw": 0.53111410141, + "metaTVr": 14.7551693916, + "metaTVw": 16.2517373562, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 6.55523085594, + "openo": 0.544054508209, + "real": 370.380084991, + "request": 30828.9682617, + "sys": "yellowstone" + }, + "151002-144824": { + "TSr": 300.078349829, + "TSw": 107.732641459, + "actual": 1043904.0, + "cores": 16, + "metaTIr": 0.865505695343, + "metaTIw": 0.587111234665, + "metaTVr": 12.9958343506, + "metaTVw": 31.3837080002, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 2.28778886795, + "openo": 1.09594678879, + "real": 389.049459934, + "request": 30828.9682617, + "sys": "yellowstone" + }, + "151005-133419": { + "TSr": 142.834536552, + "TSw": 65.3223130703, + "actual": 1043904.0, + "cores": 16, + "metaTIr": 0.0972127914429, + "metaTIw": 0.550078868866, + "metaTVr": 1.72045087814, + "metaTVw": 15.8371667862, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 36.3499717712, + "openo": 0.30549287796, + "real": 248.811717987, + "request": 30828.9682617, + "sys": "yellowstone" + }, + "151005-141258": { + "TSr": 302.199320793, + "TSw": 65.4644916058, + "actual": 1043904.0, + "cores": 16, + "metaTIr": 0.102242946625, + "metaTIw": 0.530309677124, + "metaTVr": 2.07044291496, + "metaTVw": 16.0213608742, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 63.9010326862, + "openo": 0.194200992584, + "real": 430.064186811, + "request": 30828.9682617, + "sys": "yellowstone" + } + } + }, + "startYear": "0001" + }, + "CICE-0.1": { + "baseline": "/glade/u/tdd/asap/bakeoff/tseries/cice-0.1", + "endYear": "0010", + 
"input": "/glade/u/tdd/asap/bakeoff/hist/cice-0.1", + "isize": 444023.808, + "n2dVars": 112, + "nVars": 112, + "osize": 42112.4, + "results": { + "cdo": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 52644.0, + "sys": "geyser" + } + }, + "ncl": { + "140303-153053": { + "cores": 1, + "correct": "fail", + "kernel": 2041.37, + "metadata": true, + "nodes": 1, + "real": 42931.93, + "sys": "geyser", + "user": 2850.04 + } + }, + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 8909.0, + "sys": "geyser" + } + }, + "ncr": { + "140226-172014": { + "cores": 4, + "correct": "fail", + "metadata": true, + "nodes": 1, + "real": 1894.0, + "sys": "caldera" + }, + "140312-191124": { + "TS": 1496.09, + "cores": 16, + "correct": "fail", + "metaTI": 340.73, + "metaTV": 382.07, + "metadata": true, + "nodes": 4, + "real": 2275.0, + "sys": "caldera" + } + }, + "pagoda": { + "??????-??????": { + "cores": 16, + "correct": "pass", + "metadata": true, + "nodes": 4, + "real": 7643.0, + "sys": "caldera" + } + }, + "pynio": { + "140306-095228": { + "cores": 1, + "correct": "fail", + "kernel": 615.82, + "metadata": false, + "nodes": 1, + "real": 5123.69, + "sys": "geyser", + "user": 2224.77 + }, + "140313-191710": { + "TS": 8314.32, + "actual": 591360.0, + "cores": 1, + "correct": "fail", + "kernel": 1142.96, + "metaTI": 863.84, + "metaTV": 250.88, + "metadata": true, + "nodes": 1, + "openi": 10.87, + "openo": 0.43, + "real": 9441.99, + "request": 442968.9, + "sys": "yellowstone", + "user": 3773.87 + } + }, + "pynio4_0": { + "140317-232134": { + "TS": 10980.29, + "actual": 591360.0, + "cores": 1, + "correct": "fail", + "kernel": 1239.65, + "metaTI": 2122.46, + "metaTV": 252.9, + "metadata": true, + "nodes": 1, + "openi": 11.49, + "openo": 0.15, + "real": 13369.16, + "request": 442968.9, + "sys": "yellowstone", + "user": 8880.6 + } + }, + "pyniompi": { + "140305-214140": { + "cores": 
16, + "correct": "fail", + "kernel": 0.11, + "metadata": false, + "nodes": 4, + "real": 499.0, + "sys": "yellowstone", + "user": 0.19 + }, + "140313-160555": { + "TS": 506.93, + "actual": 483840.0, + "cores": 16, + "correct": "fail", + "kernel": 0.33, + "metaTI": 49.12, + "metaTV": 7.06, + "metadata": true, + "nodes": 4, + "openi": 11.02, + "openo": 0.33, + "real": 506.94, + "request": 442968.75, + "sys": "yellowstone", + "user": 0.22 + } + }, + "pyniompi4_0": { + "140318-070023": { + "TS": 625.78, + "actual": 591360.0, + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metaTI": 82.63, + "metaTV": 4.44, + "metadata": true, + "nodes": 4, + "openi": 10.87, + "openo": 0.21, + "real": 723.09, + "request": 442968.75, + "sys": "yellowstone", + "user": 0.22 + } + }, + "pyniompi4_1": { + "140318-092050": { + "TS": 810.31, + "actual": 591360.0, + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metaTI": 109.63, + "metaTV": 12.48, + "metadata": true, + "nodes": 4, + "openi": 11.27, + "openo": 0.21, + "real": 945.86, + "request": 442968.75, + "sys": "yellowstone", + "user": 0.22 + } + }, + "pyreshaper": { + "140624-114000": { + "TS": 526.702405453, + "actual": 591360.0, + "cores": 16, + "metaTI": 68.0012328625, + "metaTV": 12.8914823532, + "metadata": true, + "nodes": 4, + "openi": 9.54658794403, + "openo": 0.496648788452, + "real": 631.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "140826-183939": { + "TS": 516.087095737, + "actual": 591360.0, + "cores": 16, + "metaTI": 68.3514671326, + "metaTV": 4.9983458519, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.4577100277, + "openo": 0.327159166336, + "real": 638.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "140902-142858": { + "TS": 526.552634001, + "actual": 591360.0, + "cores": 16, + "metaTI": 52.2834627628, + "metaTV": 7.26059746742, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.8995859623, + "openo": 0.3378469944, + "real": 632.0, + "request": 
442968.903809, + "sys": "yellowstone" + }, + "140911-113541": { + "TS": 510.946885824, + "actual": 591360.0, + "cores": 16, + "metaTI": 48.8930177689, + "metaTV": 7.55659413338, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.9417448044, + "openo": 0.309250116348, + "real": 615.0, + "request": 442968.903809, + "sys": "yellowstone" + } + }, + "pyreshaper4": { + "140624-114226": { + "TS": 627.303998947, + "actual": 591360.0, + "cores": 16, + "metaTI": 92.6080269814, + "metaTV": 8.79434752464, + "metadata": true, + "nodes": 4, + "openi": 9.54019212723, + "openo": 0.270900964737, + "real": 777.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "140827-150143": { + "TS": 622.689025164, + "actual": 591360.0, + "cores": 16, + "metaTI": 85.026517868, + "metaTV": 5.28267073631, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.65681600571, + "openo": 0.476130962372, + "real": 761.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "140902-135549": { + "TS": 611.696000099, + "actual": 591360.0, + "cores": 16, + "metaTI": 91.8807518482, + "metaTV": 8.92097759247, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.3194150925, + "openo": 0.477138996124, + "real": 756.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "140911-114735": { + "TS": 663.592682123, + "actual": 591360.0, + "cores": 16, + "metaTI": 115.95199728, + "metaTV": 12.9579110146, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.26338791847, + "openo": 0.33904671669, + "real": 804.0, + "request": 442968.903809, + "sys": "yellowstone" + } + }, + "pyreshaper4c": { + "140624-114838": { + "TS": 764.767405272, + "actual": 591360.0, + "cores": 16, + "metaTI": 111.515741587, + "metaTV": 10.9106748104, + "metadata": true, + "nodes": 4, + "openi": 8.38807797432, + "openo": 0.644446849823, + "real": 946.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "140827-150440": { + "TS": 761.094282866, + "actual": 591360.0, + 
"cores": 16, + "metaTI": 114.62279439, + "metaTV": 9.76032447815, + "metadata": true, + "nodes": 4, + "openi": 9.65682888031, + "openo": 0.308700799942, + "real": 938.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "140902-111112": { + "TS": 776.177241087, + "actual": 484800.0, + "cores": 16, + "metaTI": 13.9352908134, + "metaTV": 3.02018046379, + "metadata": true, + "nodes": 4, + "once": true, + "openi": 10.2728459835, + "openo": 0.466406345367, + "real": 843.0, + "request": 442968.751373, + "sys": "yellowstone" + }, + "140902-134411": { + "TS": 780.895755291, + "actual": 591360.0, + "cores": 16, + "metaTI": 114.899274826, + "metaTV": 10.0706048012, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 11.0469231606, + "openo": 0.573573112488, + "real": 925.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "140902-211735": { + "TS": 827.541911125, + "actual": 591360.0, + "cores": 16, + "metaTI": 135.3350811, + "metaTV": 12.0612881184, + "metadata": true, + "nodes": 1, + "once": false, + "openi": 11.1946439743, + "openo": 1.40033817291, + "real": 1004.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "140911-115045": { + "TS": 823.123622179, + "actual": 591360.0, + "cores": 16, + "metaTI": 113.007497072, + "metaTV": 10.0991332531, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.7019701004, + "openo": 0.274614095688, + "real": 960.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "150115-172148": { + "TS": 481.313974142, + "actual": 591360.0, + "cores": 32, + "metaTI": 73.4051861763, + "metaTV": 5.76744818687, + "metadata": true, + "nodes": 2, + "once": false, + "openi": 10.6601059437, + "openo": 0.632927179337, + "real": 583.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "150115-174118": { + "TS": 256.810166836, + "actual": 591360.0, + "cores": 64, + "metaTI": 34.1589169502, + "metaTV": 2.17316675186, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.4423499107, + 
"openo": 0.746605396271, + "real": 337.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "150115-180958": { + "TS": 250.097705364, + "actual": 591360.0, + "cores": 96, + "metaTI": 34.0083539486, + "metaTV": 2.6528646946, + "metadata": true, + "nodes": 6, + "once": false, + "openi": 8.67340183258, + "openo": 0.761589050293, + "real": 338.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "150115-201821": { + "TS": 148.282890081, + "actual": 591360.0, + "cores": 128, + "metaTI": 12.9395039082, + "metaTV": 0.515069484711, + "metadata": true, + "nodes": 8, + "once": false, + "openi": 8.87768483162, + "openo": 0.856188058853, + "real": 212.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "150115-201850": { + "TS": 148.89666605, + "actual": 591360.0, + "cores": 160, + "metaTI": 13.6156411171, + "metaTV": 0.580461025238, + "metadata": true, + "nodes": 10, + "once": false, + "openi": 9.20113587379, + "openo": 0.783620834351, + "real": 213.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "150115-203102": { + "TS": 817.954832315, + "actual": 591360.0, + "cores": 16, + "metaTI": 130.260757685, + "metaTV": 11.7173800468, + "metadata": true, + "nodes": 1, + "once": false, + "openi": 8.85170078278, + "openo": 1.00009465218, + "real": 973.0, + "request": 442968.903809, + "sys": "yellowstone" + }, + "150918-141419": { + "TS": 861.581918955, + "actual": 591360.0, + "cores": 16, + "metaTI": 132.98685503, + "metaTV": 11.4721367359, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.71458387375, + "openo": 0.56676697731, + "real": 1004.08923078, + "request": 442968.903809, + "sys": "yellowstone" + }, + "151002-092925": { + "TS": 376.300118208, + "actual": 715008.0, + "cores": 16, + "metaTI": 70.3662366867, + "metaTV": 0.59819149971, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 11.5092349052, + "openo": 0.235598564148, + "real": 785.061555147, + "request": 561093.903809, + "sys": "yellowstone" + }, + 
"151002-121023": { + "TSr": 317.063103914, + "TSw": 378.894732237, + "actual": 715008.0, + "cores": 16, + "metaTIr": 48.0411641598, + "metaTIw": 70.4985556602, + "metaTVr": 9.33440876007, + "metaTVw": 0.652191162109, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.04424095154, + "openo": 0.267147064209, + "real": 811.08354497, + "request": 561093.903809, + "sys": "yellowstone" + }, + "151002-150144": { + "TSr": 619.917356491, + "TSw": 379.453398466, + "actual": 715008.0, + "cores": 16, + "metaTIr": 102.077307701, + "metaTIw": 78.5123889446, + "metaTVr": 23.8168251514, + "metaTVw": 2.38540887833, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 21.7854847908, + "openo": 0.407775163651, + "real": 1168.82769799, + "request": 561093.903809, + "sys": "yellowstone" + }, + "151005-134132": { + "TSr": 160.806319952, + "TSw": 379.374226332, + "actual": 715008.0, + "cores": 16, + "metaTIr": 25.2732410431, + "metaTIw": 70.726893425, + "metaTVr": 0.279175758362, + "metaTVw": 0.473909139633, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 48.6959922314, + "openo": 0.246194124222, + "real": 676.466734886, + "request": 561093.903809, + "sys": "yellowstone" + }, + "151005-141659": { + "TSr": 153.592193604, + "TSw": 377.051423788, + "actual": 715008.0, + "cores": 16, + "metaTIr": 23.6473412514, + "metaTIw": 70.3968391418, + "metaTVr": 0.163871049881, + "metaTVw": 0.453300952911, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 45.6162559986, + "openo": 0.26899600029, + "real": 666.109079123, + "request": 561093.903809, + "sys": "yellowstone" + } + } + }, + "startYear": "0001" + }, + "CICE-1.0": { + "baseline": "/glade/u/tdd/asap/bakeoff/tseries/cice-1.0", + "endYear": "0010", + "input": "/glade/u/tdd/asap/bakeoff/hist/cice-1.0", + "isize": 6596.608, + "n2dVars": 117, + "nVars": 117, + "osize": 3923.2, + "results": { + "cdo": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 
1266.0, + "sys": "caldera" + } + }, + "ncl": { + "140226-090346": { + "cores": 1, + "correct": "fail", + "kernel": 30.49, + "metadata": false, + "nodes": 1, + "real": 122.42, + "sys": "geyser", + "user": 41.92 + } + }, + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1717.0, + "sys": "geyser" + } + }, + "ncr": { + "140226-165327": { + "cores": 4, + "correct": "fail", + "metadata": true, + "nodes": 1, + "real": 351.0, + "sys": "caldera" + }, + "140312-192224": { + "TS": 590.53, + "cores": 16, + "correct": "fail", + "metaTI": 3.88, + "metaTV": 24.01, + "metadata": true, + "nodes": 4, + "real": 646.0, + "sys": "caldera" + } + }, + "pagoda": { + "??????-??????": { + "cores": 4, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1472.0, + "sys": "caldera" + } + }, + "pynio": { + "140306-085654": { + "cores": 1, + "correct": "fail", + "kernel": 10.99, + "metadata": false, + "nodes": 1, + "real": 583.14, + "sys": "geyser", + "user": 37.36 + }, + "140313-140222": { + "TS": 749.0, + "actual": 56160.0, + "cores": 1, + "correct": "fail", + "kernel": 21.67, + "metaTI": 24.15, + "metaTV": 43.49, + "metadata": true, + "nodes": 1, + "openi": 11.29, + "openo": 0.06, + "real": 829.98, + "request": 6581.25, + "sys": "geyser", + "user": 73.95 + } + }, + "pynio4_0": { + "140317-232127": { + "TS": 738.4, + "actual": 168480.0, + "cores": 1, + "correct": "fail", + "kernel": 19.79, + "metaTI": 36.44, + "metaTV": 29.97, + "metadata": true, + "nodes": 1, + "openi": 8.05, + "openo": 0.14, + "real": 814.78, + "request": 6581.25, + "sys": "geyser", + "user": 150.37 + } + }, + "pynio4_1": { + "140317-114654": { + "TS": 955.55, + "actual": 168480.0, + "cores": 1, + "correct": "fail", + "kernel": 16.99, + "metaTI": 61.52, + "metaTV": 96.4, + "metadata": true, + "nodes": 1, + "openi": 10.16, + "openo": 0.65, + "real": 1125.94, + "request": 6581.25, + "sys": "geyser", + "user": 206.09 + } + }, + "pyniompi": { + 
"140306-085823": { + "cores": 16, + "correct": "fail", + "kernel": 0.1, + "metadata": false, + "nodes": 4, + "real": 60.68, + "sys": "yellowstone", + "user": 0.18 + }, + "140313-154954": { + "TS": 77.32, + "actual": 56160.0, + "cores": 16, + "correct": "fail", + "kernel": 0.07, + "metaTI": 1.95, + "metaTV": 3.78, + "metadata": true, + "nodes": 4, + "openi": 10.54, + "openo": 0.1, + "real": 98.22, + "request": 6581.25, + "sys": "yellowstone", + "user": 0.19 + } + }, + "pyniompi4_0": { + "140318-070023": { + "TS": 78.05, + "actual": 168480.0, + "cores": 16, + "correct": "fail", + "kernel": 0.07, + "metaTI": 2.96, + "metaTV": 2.97, + "metadata": true, + "nodes": 4, + "openi": 9.39, + "openo": 0.18, + "real": 99.4, + "request": 6581.25, + "sys": "yellowstone", + "user": 0.18 + } + }, + "pyniompi4_1": { + "140318-092051": { + "TS": 73.51, + "actual": 168480.0, + "cores": 16, + "correct": "fail", + "kernel": 0.07, + "metaTI": 2.28, + "metaTV": 2.36, + "metadata": true, + "nodes": 4, + "openi": 9.43, + "openo": 0.32, + "real": 94.29, + "request": 6581.25, + "sys": "yellowstone", + "user": 0.18 + } + }, + "pyreshaper": { + "140624-113558": { + "TS": 265.71512866, + "actual": 168480.0, + "cores": 16, + "metaTI": 5.01533293724, + "metaTV": 11.8876111507, + "metadata": true, + "nodes": 4, + "openi": 19.778968811, + "openo": 0.388194084167, + "real": 341.0, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "140826-145234": { + "TS": 113.901770353, + "actual": 168480.0, + "cores": 16, + "metaTI": 3.08545422554, + "metaTV": 5.78772473335, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.86034393311, + "openo": 0.28881072998, + "real": 167.0, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "140902-142114": { + "TS": 118.544679165, + "actual": 168480.0, + "cores": 16, + "metaTI": 3.13189768791, + "metaTV": 13.6027126312, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 7.65998196602, + "openo": 0.59086060524, + "real": 168.0, + 
"request": 6581.41067505, + "sys": "yellowstone" + }, + "140911-112739": { + "TS": 73.3629565239, + "actual": 168480.0, + "cores": 16, + "metaTI": 1.93174624443, + "metaTV": 2.94866061211, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 13.4689209461, + "openo": 0.409775018692, + "real": 133.0, + "request": 6581.41067505, + "sys": "yellowstone" + } + }, + "pyreshaper4": { + "140624-113559": { + "TS": 266.083644629, + "actual": 168480.0, + "cores": 16, + "metaTI": 4.95094203949, + "metaTV": 10.9867026806, + "metadata": true, + "nodes": 4, + "openi": 19.8844599724, + "openo": 0.547864675522, + "real": 347.0, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "140826-145234": { + "TS": 113.744300365, + "actual": 168480.0, + "cores": 16, + "metaTI": 4.30774283409, + "metaTV": 7.11053514481, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.86026287079, + "openo": 0.19504904747, + "real": 167.0, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "140902-134448": { + "TS": 70.0048351288, + "actual": 168480.0, + "cores": 16, + "metaTI": 2.3086950779, + "metaTV": 3.15719771385, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.03446006775, + "openo": 0.369714021683, + "real": 88.0, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "140911-113815": { + "TS": 177.28745842, + "actual": 168480.0, + "cores": 16, + "metaTI": 5.62634396553, + "metaTV": 7.50685191154, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 14.2388157845, + "openo": 0.467931509018, + "real": 244.0, + "request": 6581.41067505, + "sys": "yellowstone" + } + }, + "pyreshaper4c": { + "140624-113440": { + "TS": 240.76835227, + "actual": 168480.0, + "cores": 16, + "metaTI": 3.82919192314, + "metaTV": 5.04663062096, + "metadata": true, + "nodes": 4, + "openi": 18.8568351269, + "openo": 0.306517124176, + "real": 311.0, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "140826-145234": { + "TS": 113.022646427, + "actual": 168480.0, 
+ "cores": 16, + "metaTI": 2.44539260864, + "metaTV": 3.75828146935, + "metadata": true, + "nodes": 4, + "openi": 9.8609559536, + "openo": 0.511830091476, + "real": 167.0, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "140902-105902": { + "TS": 91.887765646, + "actual": 57120.0, + "cores": 16, + "metaTI": 1.58539175987, + "metaTV": 4.56849193573, + "metadata": true, + "nodes": 4, + "once": true, + "openi": 8.57277202606, + "openo": 0.412314414978, + "real": 113.0, + "request": 6581.25137329, + "sys": "yellowstone" + }, + "140902-133113": { + "TS": 96.3482551575, + "actual": 168480.0, + "cores": 16, + "metaTI": 2.94213962555, + "metaTV": 3.6310069561, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.32849788666, + "openo": 0.319946289062, + "real": 147.0, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "140902-210319": { + "TS": 104.103149891, + "actual": 168480.0, + "cores": 16, + "metaTI": 6.43604660034, + "metaTV": 20.8584721088, + "metadata": true, + "nodes": 1, + "once": false, + "openi": 18.3028130531, + "openo": 0.899433851242, + "real": 148.0, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "140911-113908": { + "TS": 196.712829828, + "actual": 168480.0, + "cores": 16, + "metaTI": 5.53936100006, + "metaTV": 6.65181231499, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 17.4854698181, + "openo": 0.200965881348, + "real": 263.0, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "150918-140000": { + "TS": 120.856712103, + "actual": 168480.0, + "cores": 16, + "metaTI": 4.0064227581, + "metaTV": 2.71381306648, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.30154585838, + "openo": 0.190922498703, + "real": 125.465778112, + "request": 6581.41067505, + "sys": "yellowstone" + }, + "151002-093114": { + "TS": 7.29186058044, + "actual": 177840.0, + "cores": 16, + "metaTI": 1.62267613411, + "metaTV": 0.319401979446, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 
9.7917881012, + "openo": 0.256956577301, + "real": 85.7960050106, + "request": 8336.41067505, + "sys": "yellowstone" + }, + "151002-115913": { + "TSr": 131.842990875, + "TSw": 7.56044721603, + "actual": 177840.0, + "cores": 16, + "metaTIr": 1.72517442703, + "metaTIw": 1.63546466827, + "metaTVr": 6.24339962006, + "metaTVw": 0.316979408264, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.83018398285, + "openo": 0.647891044617, + "real": 141.086723089, + "request": 8336.41067505, + "sys": "yellowstone" + }, + "151002-144535": { + "TSr": 164.895143509, + "TSw": 8.39152407646, + "actual": 177840.0, + "cores": 16, + "metaTIr": 2.69702959061, + "metaTIw": 1.62958669662, + "metaTVr": 6.13357424736, + "metaTVw": 0.329475164413, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 12.0156641006, + "openo": 2.29768490791, + "real": 177.504949093, + "request": 8336.41067505, + "sys": "yellowstone" + }, + "151005-133333": { + "TSr": 73.7382771969, + "TSw": 7.17182159424, + "actual": 177840.0, + "cores": 16, + "metaTIr": 1.14449071884, + "metaTIw": 1.64758038521, + "metaTVr": 0.0861296653748, + "metaTVw": 0.355614185333, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 18.1041100025, + "openo": 0.222780942917, + "real": 116.379180908, + "request": 8336.41067505, + "sys": "yellowstone" + }, + "151005-141033": { + "TSr": 179.972988367, + "TSw": 7.15631198883, + "actual": 177840.0, + "cores": 16, + "metaTIr": 1.48803186417, + "metaTIw": 1.63100767136, + "metaTVr": 0.086678981781, + "metaTVw": 0.353240013123, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 19.2583482265, + "openo": 0.235752820969, + "real": 248.850775957, + "request": 8336.41067505, + "sys": "yellowstone" + } + } + }, + "startYear": "0001" + }, + "CLM-0.25": { + "baseline": "/glade/u/tdd/asap/bakeoff/tseries/clmse-0.25", + "endYear": "0010", + "input": "/glade/u/tdd/asap/bakeoff/hist/clmse-0.25", + "isize": 81802.24, + "n2dVars": 150, + "nVars": 150, + "osize": 
124702.8, + "results": { + "ncl": { + "140228-145409": { + "cores": 1, + "kernel": 194.42, + "metadata": false, + "nodes": 1, + "real": 3358.85, + "sys": "geyser", + "user": 377.15 + } + }, + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 4141.0, + "sys": "geyser" + } + }, + "ncr": { + "140303-105425": { + "cores": 12, + "correct": "pass", + "metadata": true, + "real": 4301.0, + "sys": "caldera" + }, + "140312-193441": { + "TS": 385.05, + "cores": 16, + "correct": "pass", + "metaTI": 181.87, + "metaTV": 104.3, + "metadata": true, + "nodes": 4, + "real": 723.0, + "sys": "caldera" + } + }, + "pagoda": { + "??????-??????": { + "cores": 16, + "correct": "pass", + "metadata": true, + "nodes": 4, + "real": 5493.0, + "sys": "caldera" + } + }, + "pynio": { + "140306-091858": { + "cores": 1, + "correct": "fail", + "kernel": 86.93, + "metadata": false, + "nodes": 1, + "real": 1694.12, + "sys": "geyser", + "user": 415.54 + }, + "140313-191915": { + "TS": 2534.72, + "cores": 1, + "kernel": 203.25, + "metaTI": 418.15, + "metaTV": 323.28, + "metadata": true, + "nodes": 1, + "openi": 11.03, + "openo": 0.8, + "real": 3289.98, + "sys": "geyser", + "user": 838.8 + } + }, + "pyniompi": { + "140306-091653": { + "cores": 16, + "correct": "fail", + "kernel": 0.1, + "metadata": false, + "nodes": 4, + "real": 174.86, + "sys": "yellowstone", + "user": 0.17 + }, + "140313-160541": { + "TS": 222.28, + "cores": 16, + "kernel": 0.24, + "metaTI": 26.88, + "metaTV": 9.63, + "metadata": true, + "nodes": 4, + "openi": 10.39, + "openo": 0.09, + "real": 266.2, + "sys": "yellowstone", + "user": 0.29 + } + }, + "pyniompi4_0": { + "140318-070023": { + "TS": 255.0, + "actual": 613440.0, + "cores": 16, + "correct": "fail", + "kernel": 0.1, + "metaTI": 37.85, + "metaTV": 10.41, + "metadata": true, + "nodes": 4, + "openi": 10.81, + "openo": 0.16, + "real": 309.31, + "request": 81515.14, + "sys": "yellowstone", + "user": 0.19 + } + }, + 
"pyniompi4_1": { + "140318-091904": { + "TS": 296.05, + "actual": 613440.0, + "cores": 16, + "correct": "fail", + "kernel": 0.1, + "metaTI": 46.37, + "metaTV": 12.85, + "metadata": true, + "nodes": 4, + "openi": 10.67, + "openo": 0.23, + "real": 354.78, + "request": 81515.14, + "sys": "yellowstone", + "user": 0.19 + } + }, + "pyreshaper": { + "140624-113329": { + "TS": 217.386395931, + "actual": 749760.0, + "cores": 16, + "metaTI": 27.5108315945, + "metaTV": 11.7934093475, + "metadata": true, + "nodes": 4, + "openi": 8.86293721199, + "openo": 0.513876199722, + "real": 271.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "140826-183413": { + "TS": 228.074203253, + "actual": 749760.0, + "cores": 16, + "metaTI": 27.8776259422, + "metaTV": 14.3903722763, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.19029212, + "openo": 0.420016527176, + "real": 312.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "140902-142333": { + "TS": 229.419958115, + "actual": 749760.0, + "cores": 16, + "metaTI": 28.2502429485, + "metaTV": 12.5835442543, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.6433389187, + "openo": 0.182495117188, + "real": 307.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "140911-113052": { + "TS": 229.645380735, + "actual": 749760.0, + "cores": 16, + "metaTI": 29.3559648991, + "metaTV": 15.3165843487, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.9768149853, + "openo": 0.276948690414, + "real": 318.0, + "request": 81515.3311157, + "sys": "yellowstone" + } + }, + "pyreshaper4": { + "140624-113608": { + "TS": 253.195567369, + "actual": 749760.0, + "cores": 16, + "metaTI": 36.1291060448, + "metaTV": 14.0747671127, + "metadata": true, + "nodes": 4, + "openi": 7.50084495544, + "openo": 0.289590358734, + "real": 351.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "140827-145505": { + "TS": 262.098815203, + "actual": 749760.0, + "cores": 16, + "metaTI": 40.8452181816, + 
"metaTV": 14.2391810417, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.89155387878, + "openo": 0.295248270035, + "real": 363.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "140902-135021": { + "TS": 264.649072409, + "actual": 749760.0, + "cores": 16, + "metaTI": 38.9351382256, + "metaTV": 14.8586471081, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.3381459713, + "openo": 0.494373321533, + "real": 356.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "140911-114041": { + "TS": 277.828287601, + "actual": 749760.0, + "cores": 16, + "metaTI": 42.8577725887, + "metaTV": 15.5075862408, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.52782797813, + "openo": 0.650231122971, + "real": 390.0, + "request": 81515.3311157, + "sys": "yellowstone" + } + }, + "pyreshaper4c": { + "140624-113607": { + "TS": 293.581233501, + "actual": 749760.0, + "cores": 16, + "metaTI": 44.2273983955, + "metaTV": 11.2281382084, + "metadata": true, + "nodes": 4, + "openi": 7.57388806343, + "openo": 0.776012897491, + "real": 398.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "140827-145505": { + "TS": 259.073281527, + "actual": 749760.0, + "cores": 16, + "metaTI": 44.2974903584, + "metaTV": 7.55031824112, + "metadata": true, + "nodes": 4, + "openi": 8.77940702438, + "openo": 0.629299879074, + "real": 363.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "140902-110305": { + "TS": 301.58286643, + "actual": 106080.0, + "cores": 16, + "metaTI": 15.0113711357, + "metaTV": 2.47730088234, + "metadata": true, + "nodes": 4, + "once": true, + "openi": 10.8128950596, + "openo": 0.278973579407, + "real": 356.0, + "request": 81514.3762207, + "sys": "yellowstone" + }, + "140902-133446": { + "TS": 296.484625578, + "actual": 749760.0, + "cores": 16, + "metaTI": 46.2099742889, + "metaTV": 13.8055045605, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.4409430027, + "openo": 0.632032871246, + "real": 
360.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "140902-210745": { + "TS": 310.555556536, + "actual": 749760.0, + "cores": 16, + "metaTI": 54.604319334, + "metaTV": 18.0128221512, + "metadata": true, + "nodes": 1, + "once": false, + "openi": 10.94216609, + "openo": 0.77930521965, + "real": 414.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "140911-114209": { + "TS": 329.421911955, + "actual": 749760.0, + "cores": 16, + "metaTI": 49.9806575775, + "metaTV": 15.6132264137, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.77800393105, + "openo": 0.532899856567, + "real": 444.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "150115-171626": { + "TS": 208.854488611, + "actual": 749760.0, + "cores": 32, + "metaTI": 27.8093998432, + "metaTV": 9.49487662315, + "metadata": true, + "nodes": 2, + "once": false, + "openi": 10.8636159897, + "openo": 0.587514877319, + "real": 261.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "150115-174419": { + "TS": 166.342139482, + "actual": 749760.0, + "cores": 64, + "metaTI": 14.7328977585, + "metaTV": 4.93773913383, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.89057898521, + "openo": 0.849577188492, + "real": 235.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "150115-181055": { + "TS": 155.178570032, + "actual": 749760.0, + "cores": 96, + "metaTI": 10.2803800106, + "metaTV": 2.88407659531, + "metadata": true, + "nodes": 6, + "once": false, + "openi": 9.95401000977, + "openo": 0.725273132324, + "real": 229.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "150115-201849": { + "TS": 140.330249071, + "actual": 749760.0, + "cores": 160, + "metaTI": 4.11911892891, + "metaTV": 8.6886074543, + "metadata": true, + "nodes": 10, + "once": false, + "openi": 8.32369208336, + "openo": 0.702972888947, + "real": 212.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "150115-202111": { + "TS": 299.16028142, + "actual": 749760.0, + 
"cores": 16, + "metaTI": 50.0943911076, + "metaTV": 13.3113751411, + "metadata": true, + "nodes": 1, + "once": false, + "openi": 9.67527198792, + "openo": 0.882090806961, + "real": 382.0, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "150918-140302": { + "TS": 225.530354023, + "actual": 749760.0, + "cores": 16, + "metaTI": 71.1400485039, + "metaTV": 30.6447684765, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.88314318657, + "openo": 0.319357633591, + "real": 307.761037111, + "request": 81515.3311157, + "sys": "yellowstone" + }, + "151002-093416": { + "TS": 64.9041206837, + "actual": 798360.0, + "cores": 16, + "metaTI": 42.5193638802, + "metaTV": 3.18136024475, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.3529610634, + "openo": 0.151391983032, + "real": 261.155253887, + "request": 124675.246468, + "sys": "yellowstone" + }, + "151002-120117": { + "TSr": 121.410551786, + "TSw": 64.8986198902, + "actual": 798360.0, + "cores": 16, + "metaTIr": 24.5644390583, + "metaTIw": 42.7191648483, + "metaTVr": 19.7610211372, + "metaTVw": 3.1215763092, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 11.1755421162, + "openo": 0.393360853195, + "real": 262.587915897, + "request": 124675.246468, + "sys": "yellowstone" + }, + "151002-145027": { + "TSr": 257.785825014, + "TSw": 95.3491647243, + "actual": 798360.0, + "cores": 16, + "metaTIr": 49.3381197453, + "metaTIw": 55.4652349949, + "metaTVr": 50.4830415249, + "metaTVw": 4.86789798737, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 21.76009202, + "openo": 0.745949745178, + "real": 462.231551886, + "request": 124675.246468, + "sys": "yellowstone" + }, + "151005-133706": { + "TSr": 127.764489651, + "TSw": 65.0523197651, + "actual": 798360.0, + "cores": 16, + "metaTIr": 11.5128691196, + "metaTIw": 43.0957920551, + "metaTVr": 0.697492361069, + "metaTVw": 3.21077013016, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 71.471419096, + "openo": 
0.5601978302, + "real": 319.536077023, + "request": 124675.246468, + "sys": "yellowstone" + }, + "151005-141143": { + "TSr": 124.386564493, + "TSw": 65.0036051273, + "actual": 798360.0, + "cores": 16, + "metaTIr": 11.1683745384, + "metaTIw": 42.7969913483, + "metaTVr": 0.692959308624, + "metaTVw": 2.90459418297, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 69.9783821106, + "openo": 0.696071147919, + "real": 313.190394878, + "request": 124675.246468, + "sys": "yellowstone" + } + } + }, + "startYear": "0001" + }, + "CLM-1.0": { + "baseline": "/glade/u/tdd/asap/bakeoff/tseries/clmse-1.0", + "endYear": "0010", + "input": "/glade/u/tdd/asap/bakeoff/hist/clmse-1.0", + "isize": 8694.784, + "n2dVars": 297, + "n3dVars": 0, + "nVars": 297, + "osize": 14051.0, + "results": { + "cdo": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 2788.0, + "sys": "caldera" + } + }, + "ncl": { + "140228-142411": { + "cores": 1, + "correct": "fail", + "kernel": 45.35, + "metadata": false, + "nodes": 1, + "real": 205.48, + "sys": "geyser", + "user": 47.44 + }, + "140228-143030": { + "cores": 1, + "correct": "fail", + "kernel": 44.6, + "metadata": false, + "nodes": 1, + "real": 220.15, + "sys": "geyser", + "user": 48.45 + } + }, + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 4321.0, + "sys": "geyser" + } + }, + "ncr": { + "140226-173600": { + "cores": 4, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 2713.0, + "sys": "caldera" + }, + "140312-200047": { + "TS": 1153.94, + "cores": 16, + "correct": "pass", + "metaTI": 23.35, + "metaTV": 542.14, + "metadata": true, + "nodes": 4, + "real": 1782.0, + "sys": "caldera" + } + }, + "pagoda": { + "??????-??????": { + "cores": 4, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1789.0, + "sys": "caldera" + } + }, + "pynio": { + "140306-090318": { + "cores": 1, + "correct": "fail", + 
"kernel": 28.22, + "metadata": false, + "nodes": 1, + "real": 1359.24, + "sys": "geyser", + "user": 68.68 + }, + "140313-140215": { + "TS": 1235.35, + "actual": 147840.0, + "cores": 1, + "correct": "fail", + "kernel": 55.29, + "metaTI": 60.26, + "metaTV": 135.67, + "metadata": true, + "nodes": 1, + "openi": 13.75, + "openo": 1.95, + "real": 1452.57, + "request": 8832.56, + "sys": "geyser", + "user": 162.48 + } + }, + "pynio4_0": { + "140317-232111": { + "TS": 1174.86, + "actual": 1148160.0, + "cores": 1, + "correct": "fail", + "kernel": 47.37, + "metaTI": 106.8, + "metaTV": 124.61, + "metadata": true, + "nodes": 1, + "openi": 11.67, + "openo": 0.5, + "real": 1421.16, + "request": 8678.32, + "sys": "geyser", + "user": 304.33 + } + }, + "pynio4_1": { + "140318-114712": { + "TS": 1451.53, + "actual": 1148160.0, + "cores": 1, + "correct": "fail", + "kernel": 43.67, + "metaTI": 146.12, + "metaTV": 206.4, + "metadata": true, + "nodes": 1, + "openi": 11.81, + "openo": 1.66, + "real": 1820.33, + "request": 8678.32, + "sys": "geyser", + "user": 426.28 + } + }, + "pyniompi": { + "140306-091538": { + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metadata": false, + "nodes": 4, + "real": 129.54, + "sys": "yellowstone", + "user": 0.2 + }, + "140313-131051": { + "TS": 146.97, + "actual": 143520.0, + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metaTI": 1.8, + "metaTV": 3.95, + "metadata": true, + "nodes": 4, + "openi": 10.49, + "openo": 0.12, + "real": 172.44, + "request": 8676.81, + "sys": "yellowstone", + "user": 0.19 + }, + "140313-155000": { + "TS": 158.54, + "actual": 143520.0, + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metaTI": 1.8, + "metaTV": 5.83, + "metadata": true, + "nodes": 4, + "openi": 10.49, + "openo": 0.12, + "real": 183.07, + "request": 8676.81, + "sys": "yellowstone", + "user": 0.19 + }, + "140314-101030": { + "TS": 112.91, + "actual": 1148160.0, + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metaTI": 2.66, + 
"metaTV": 5.16, + "metadata": true, + "nodes": 4, + "openi": 9.67, + "openo": 0.13, + "real": 139.56, + "request": 8676.81, + "sys": "yellowstone", + "user": 0.18 + } + }, + "pyniompi4_0": { + "140318-070023": { + "TS": 156.21, + "actual": 1148160.0, + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metaTI": 6.95, + "metaTV": 6.95, + "metadata": true, + "nodes": 4, + "openi": 10.6, + "openo": 0.33, + "real": 189.24, + "request": 8676.81, + "sys": "yellowstone", + "user": 0.18 + } + }, + "pyniompi4_1": { + "140318-091904": { + "TS": 129.37, + "actual": 1148160.0, + "cores": 16, + "correct": "fail", + "kernel": 0.11, + "metaTI": 5.47, + "metaTV": 4.62, + "metadata": true, + "nodes": 4, + "openi": 9.89, + "openo": 0.2, + "real": 151.48, + "request": 8676.81, + "sys": "yellowstone", + "user": 0.18 + } + }, + "pyreshaper": { + "140624-113616": { + "TS": 362.838088274, + "actual": 1425600.0, + "cores": 16, + "metaTI": 3.64182257652, + "metaTV": 7.80311584473, + "metadata": true, + "nodes": 4, + "openi": 13.7385029793, + "openo": 0.708451747894, + "real": 427.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "140826-145351": { + "TS": 185.680801392, + "actual": 1425600.0, + "cores": 16, + "metaTI": 3.53920269012, + "metaTV": 6.57377076149, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.960504055, + "openo": 0.712677240372, + "real": 244.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "140902-142228": { + "TS": 162.275549173, + "actual": 1425600.0, + "cores": 16, + "metaTI": 11.8885095119, + "metaTV": 34.244243145, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 8.73659610748, + "openo": 0.758320569992, + "real": 242.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "140911-112912": { + "TS": 119.784294128, + "actual": 1425600.0, + "cores": 16, + "metaTI": 4.4601495266, + "metaTV": 7.02146315575, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 11.810557127, + "openo": 0.630494594574, + 
"real": 182.0, + "request": 8678.71307373, + "sys": "yellowstone" + } + }, + "pyreshaper4": { + "140624-113802": { + "TS": 384.561959028, + "actual": 1425600.0, + "cores": 16, + "metaTI": 6.64893960953, + "metaTV": 15.0808794498, + "metadata": true, + "nodes": 4, + "openi": 19.5430710316, + "openo": 0.769854307175, + "real": 465.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "140826-145351": { + "TS": 176.847893715, + "actual": 1425600.0, + "cores": 16, + "metaTI": 6.18663406372, + "metaTV": 16.3740646839, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.9581079483, + "openo": 0.189343929291, + "real": 244.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "140902-134804": { + "TS": 128.163312912, + "actual": 1425600.0, + "cores": 16, + "metaTI": 4.74730086327, + "metaTV": 12.2831828594, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 11.5698750019, + "openo": 1.13589835167, + "real": 196.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "140911-113952": { + "TS": 293.202519417, + "actual": 1425600.0, + "cores": 16, + "metaTI": 8.82940626144, + "metaTV": 19.2759275436, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 14.2772920132, + "openo": 1.35260295868, + "real": 341.0, + "request": 8678.71307373, + "sys": "yellowstone" + } + }, + "pyreshaper4c": { + "140624-113733": { + "TS": 385.772614479, + "actual": 1425600.0, + "cores": 16, + "metaTI": 7.3999941349, + "metaTV": 12.4398062229, + "metadata": true, + "nodes": 4, + "openi": 19.5401780605, + "openo": 0.989186286926, + "real": 436.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "140826-145351": { + "TS": 175.992659807, + "actual": 1425600.0, + "cores": 16, + "metaTI": 6.88678193092, + "metaTV": 14.0521204472, + "metadata": true, + "nodes": 4, + "openi": 10.9594869614, + "openo": 0.499489068985, + "real": 244.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "140902-110102": { + "TS": 177.58859539, + "actual": 
146880.0, + "cores": 16, + "metaTI": 0.88392496109, + "metaTV": 4.15271639824, + "metadata": true, + "nodes": 4, + "once": true, + "openi": 11.6738321781, + "openo": 0.889152765274, + "real": 233.0, + "request": 8676.81610107, + "sys": "yellowstone" + }, + "140902-133236": { + "TS": 163.407021046, + "actual": 1425600.0, + "cores": 16, + "metaTI": 5.86030912399, + "metaTV": 11.3345866203, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 11.1746912003, + "openo": 0.731638431549, + "real": 230.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "140902-210436": { + "TS": 158.254923105, + "actual": 1425600.0, + "cores": 16, + "metaTI": 18.5883505344, + "metaTV": 33.6075720787, + "metadata": true, + "nodes": 1, + "once": false, + "openi": 20.3818750381, + "openo": 1.33059597015, + "real": 225.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "140911-114047": { + "TS": 284.039642811, + "actual": 1425600.0, + "cores": 16, + "metaTI": 6.48289012909, + "metaTV": 12.7032730579, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 15.5982160568, + "openo": 0.793869018555, + "real": 362.0, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "150918-140219": { + "TS": 242.346611023, + "actual": 1425600.0, + "cores": 16, + "metaTI": 8.48106193542, + "metaTV": 15.5442285538, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.54877281189, + "openo": 1.01668596268, + "real": 264.768231869, + "request": 8678.71307373, + "sys": "yellowstone" + }, + "151002-093313": { + "TS": 10.7341187, + "actual": 1443420.0, + "cores": 16, + "metaTI": 5.90268445015, + "metaTV": 10.5968191624, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.87163710594, + "openo": 0.614326238632, + "real": 204.401623964, + "request": 14019.9852104, + "sys": "yellowstone" + }, + "151002-120140": { + "TSr": 255.788938046, + "TSw": 9.34615063667, + "actual": 1443420.0, + "cores": 16, + "metaTIr": 1.61454749107, + "metaTIw": 5.90498423576, + 
"metaTVr": 8.47008776665, + "metaTVw": 10.5631215572, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.37232899666, + "openo": 0.265953063965, + "real": 287.702008009, + "request": 14019.9852104, + "sys": "yellowstone" + }, + "151002-144726": { + "TSr": 269.169423342, + "TSw": 13.1295950413, + "actual": 1443420.0, + "cores": 16, + "metaTIr": 1.14924407005, + "metaTIw": 5.88382959366, + "metaTVr": 4.54588770866, + "metaTVw": 10.5600767136, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.0409920216, + "openo": 1.07933664322, + "real": 300.97479105, + "request": 14019.9852104, + "sys": "yellowstone" + }, + "151005-133542": { + "TSr": 161.287609339, + "TSw": 9.55008554459, + "actual": 1443420.0, + "cores": 16, + "metaTIr": 1.74680042267, + "metaTIw": 5.95859718323, + "metaTVr": 1.14477300644, + "metaTVw": 10.8875396252, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 86.8880727291, + "openo": 0.510288476944, + "real": 287.98132515, + "request": 14019.9852104, + "sys": "yellowstone" + }, + "151005-141339": { + "TSr": 312.091857433, + "TSw": 9.57465410233, + "actual": 1443420.0, + "cores": 16, + "metaTIr": 3.0339858532, + "metaTIw": 5.92696642876, + "metaTVr": 1.17320466042, + "metaTVw": 10.8604056835, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 91.3150508404, + "openo": 0.4595246315, + "real": 435.240852833, + "request": 14019.9852104, + "sys": "yellowstone" + } + } + }, + "startYear": "0001" + }, + "POP-0.1": { + "baseline": "/glade/u/tdd/asap/bakeoff/tseries/pop-0.1", + "endYear": "0010", + "input": "/glade/u/tdd/asap/bakeoff/hist/pop-0.1", + "isize": 3030384.64, + "n2dVars": 23, + "n3dVars": 11, + "nVars": 34, + "osize": 3124562.75, + "results": { + "ncl": { + "140306-200340": { + "cores": 1, + "correct": "fail", + "kernel": 4956.16, + "metadata": false, + "nodes": 1, + "real": 30509.62, + "sys": "geyser", + "user": 12327.82 + } + }, + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + 
"metadata": true, + "nodes": 1, + "real": 52558.0, + "sys": "geyser" + } + }, + "ncr": { + "140312-220039": { + "TS": 8365.7, + "cores": 16, + "correct": "pass", + "metaTI": 255.62, + "metaTV": 78.48, + "metadata": true, + "nodes": 4, + "real": 8745.0, + "sys": "caldera" + } + }, + "pagoda": { + "??????-??????": { + "cores": 16, + "correct": "pass", + "nodes": 4, + "real": 53051.0, + "sys": "geyser" + } + }, + "pynio": { + "140306-092824": { + "cores": 1, + "correct": "fail", + "kernel": 4047.26, + "metadata": false, + "nodes": 1, + "real": 20526.06, + "sys": "geyser", + "user": 14145.3 + }, + "140313-192244": { + "TS": 33980.16, + "actual": 3071040.0, + "cores": 1, + "correct": false, + "kernel": 6749.21, + "metaTI": 343.98, + "metaTV": 129.17, + "metadata": true, + "nodes": 1, + "openi": 10.93, + "openo": 0.1, + "real": 34465.08, + "request": 3029589.9, + "sys": "geyser", + "user": 22193.31 + } + }, + "pyniompi": { + "140314-130728": { + "TS": 4157.04, + "actual": 3071040.0, + "cores": 16, + "correct": false, + "kernel": 0.11, + "metaTI": 24.89, + "metaTV": 9.75, + "metadata": true, + "nodes": 4, + "openi": 12.99, + "openo": 0.16, + "real": 4205.33, + "request": 3029589.94, + "sys": "yellowstone", + "user": 0.23 + } + }, + "pyniompi4_0": { + "140318-070102": { + "TS": 6096.63, + "actual": 3071040.0, + "cores": 16, + "correct": false, + "kernel": 0.24, + "metaTI": 40.16, + "metaTV": 8.26, + "metadata": true, + "nodes": 4, + "openi": 9.97, + "openo": 0.16, + "real": 6148.8, + "request": 3029589.94, + "sys": "yellowstone", + "user": 0.43 + } + }, + "pyniompi4_1": { + "140318-092050": { + "TS": 12840.39, + "actual": 3071040.0, + "cores": 16, + "correct": false, + "kernel": 0.24, + "metaTI": 62.78, + "metaTV": 7.74, + "metadata": true, + "nodes": 4, + "openi": 11.32, + "openo": 0.26, + "real": 12906.67, + "request": 3029589.94, + "sys": "yellowstone", + "user": 0.43 + } + }, + "pyreshaper": { + "140625-133224": { + "TS": 4188.19754171, + "actual": 3071040.0, + 
"cores": 16, + "metaTI": 23.1918101311, + "metaTV": 10.8097374439, + "metadata": true, + "nodes": 4, + "openi": 9.9564101696, + "openo": 0.165270090103, + "real": 4269.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "140826-190453": { + "TS": 2057.59516311, + "actual": 3071040.0, + "cores": 16, + "metaTI": 30.1707749367, + "metaTV": 9.16880011559, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 12.6679458618, + "openo": 0.262907028198, + "real": 2152.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "140902-145453": { + "TS": 2094.32604527, + "actual": 3071040.0, + "cores": 16, + "metaTI": 28.2248177528, + "metaTV": 11.6350436211, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 12.4453110695, + "openo": 0.213987827301, + "real": 2187.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "140911-120248": { + "TS": 2136.48663521, + "actual": 3071040.0, + "cores": 16, + "metaTI": 23.9372041225, + "metaTV": 11.3822102547, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 12.6650979519, + "openo": 0.218416929245, + "real": 2198.0, + "request": 3029589.93713, + "sys": "yellowstone" + } + }, + "pyreshaper4": { + "140625-111405": { + "TS": 6304.54537344, + "actual": 3071040.0, + "cores": 16, + "metaTI": 42.2605381012, + "metaTV": 12.7368450165, + "metadata": true, + "nodes": 4, + "openi": 9.24561190605, + "openo": 0.189666986465, + "real": 6366.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "140827-154423": { + "TS": 3225.38712144, + "actual": 3071040.0, + "cores": 16, + "metaTI": 44.9510638714, + "metaTV": 10.1678600311, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.8547489643, + "openo": 0.166674852371, + "real": 3321.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "140902-143814": { + "TS": 3119.2581923, + "actual": 3071040.0, + "cores": 16, + "metaTI": 43.4275140762, + "metaTV": 10.9593930244, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 
12.8385181427, + "openo": 0.313005208969, + "real": 3195.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "140911-123019": { + "TS": 3223.67403054, + "actual": 3071040.0, + "cores": 16, + "metaTI": 45.1119039059, + "metaTV": 10.2575378418, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.9783298969, + "openo": 0.621034145355, + "real": 3334.0, + "request": 3029589.93713, + "sys": "yellowstone" + } + }, + "pyreshaper4c": { + "140624-201355": { + "TS": 12475.4380803, + "actual": 3071040.0, + "cores": 16, + "metaTI": 60.3435850143, + "metaTV": 7.27081155777, + "metadata": true, + "nodes": 4, + "openi": 12.5250890255, + "openo": 0.35288977623, + "real": 12575.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "140827-164042": { + "TS": 6598.66926503, + "actual": 3071040.0, + "cores": 16, + "metaTI": 61.5178649426, + "metaTV": 9.50716114044, + "metadata": true, + "nodes": 4, + "openi": 10.8506140709, + "openo": 0.240426063538, + "real": 6700.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "140902-124555": { + "TS": 6473.02565169, + "actual": 3039360.0, + "cores": 16, + "metaTI": 20.4248332977, + "metaTV": 2.28913474083, + "metadata": true, + "nodes": 4, + "once": true, + "openi": 10.9985148907, + "openo": 0.308897256851, + "real": 6526.0, + "request": 3029589.8465, + "sys": "yellowstone" + }, + "140902-151821": { + "TS": 6474.7544241, + "actual": 3071040.0, + "cores": 16, + "metaTI": 61.1706421375, + "metaTV": 9.0585565567, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.7027449608, + "openo": 0.26385307312, + "real": 6568.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "140911-132509": { + "TS": 6519.65876412, + "actual": 3071040.0, + "cores": 16, + "metaTI": 61.8785927296, + "metaTV": 9.59517264366, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.9871590137, + "openo": 0.313570022583, + "real": 6624.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + 
"150116-213332": { + "TS": 6497.97398233, + "actual": 3071040.0, + "cores": 16, + "metaTI": 61.8820841312, + "metaTV": 9.12207603455, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 11.5999288559, + "openo": 0.207006931305, + "real": 6622.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "150116-233544": { + "TS": 6279.48014379, + "actual": 3071040.0, + "cores": 64, + "metaTI": 19.3176209927, + "metaTV": 4.49379658699, + "metadata": true, + "nodes": 16, + "once": false, + "openi": 10.7711701393, + "openo": 0.500372886658, + "real": 6354.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "150116-233546": { + "TS": 6259.78720379, + "actual": 3071040.0, + "cores": 160, + "metaTI": 19.9084851742, + "metaTV": 4.1549680233, + "metadata": true, + "nodes": 40, + "once": false, + "openi": 9.19333982468, + "openo": 0.398915052414, + "real": 6331.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "150116-233548": { + "TS": 6262.09757543, + "actual": 3071040.0, + "cores": 96, + "metaTI": 19.8878400326, + "metaTV": 4.18029904366, + "metadata": true, + "nodes": 24, + "once": false, + "openi": 10.7430889606, + "openo": 0.238453149796, + "real": 6333.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "150116-233801": { + "TS": 6387.83472657, + "actual": 3071040.0, + "cores": 32, + "metaTI": 43.1699090004, + "metaTV": 7.70080971718, + "metadata": true, + "nodes": 8, + "once": false, + "openi": 10.7566359043, + "openo": 0.331459999084, + "real": 6491.0, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "150918-160256": { + "TS": 7476.31867456, + "actual": 3071040.0, + "cores": 16, + "metaTI": 220.60375309, + "metaTV": 20.3197700977, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 6.91684889793, + "openo": 0.235767841339, + "real": 7504.10058808, + "request": 3029589.93713, + "sys": "yellowstone" + }, + "151002-111249": { + "TSr": 1027.95776629, + "TSw": 5291.65748668, + "actual": 3121360.0, + "cores": 16, + 
"metaTIr": 104.953143358, + "metaTIw": 87.6997177601, + "metaTVr": 12.7391045094, + "metaTVw": 1.22518730164, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 16.252808094, + "openo": 0.1517329216, + "real": 6153.10157013, + "request": 3073293.60839, + "sys": "yellowstone" + }, + "151002-134134": { + "TSr": 1017.69567513, + "TSw": 5369.47686791, + "actual": 3121360.0, + "cores": 16, + "metaTIr": 110.276347637, + "metaTIw": 88.3693189621, + "metaTVr": 15.7053070068, + "metaTVw": 19.1175069809, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 12.0257339478, + "openo": 0.108300924301, + "real": 6260.71265197, + "request": 3073293.60839, + "sys": "yellowstone" + }, + "151002-162705": { + "TSr": 1008.87791133, + "TSw": 5330.33792472, + "actual": 3121360.0, + "cores": 16, + "metaTIr": 107.638655424, + "metaTIw": 88.9155454636, + "metaTVr": 14.8814268112, + "metaTVw": 2.1268658638, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 11.387442112, + "openo": 0.150990962982, + "real": 6270.88925004, + "request": 3073293.60839, + "sys": "yellowstone" + }, + "151005-154933": { + "TSr": 1048.0297327, + "TSw": 5279.75957608, + "actual": 3121360.0, + "cores": 16, + "metaTIr": 86.8662247658, + "metaTIw": 88.4578080177, + "metaTVr": 0.743849277496, + "metaTVw": 0.992867469788, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 67.3424155712, + "openo": 0.199492692947, + "real": 6141.01054811, + "request": 3073293.60839, + "sys": "yellowstone" + } + } + }, + "startYear": "0001" + }, + "POP-1.0": { + "baseline": "/glade/u/tdd/asap/bakeoff/tseries/pop-1.0", + "endYear": "0010", + "input": "/glade/u/tdd/asap/bakeoff/hist/pop-1.0", + "isize": 192182.272, + "n2dVars": 78, + "n3dVars": 36, + "nVars": 114, + "osize": 194261.85, + "results": { + "cdo": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 10739.0, + "sys": "caldera" + } + }, + "ncl": { + "140303-095502": { + "cores": 1, + 
"correct": "fail", + "kernel": 771.41, + "metadata": false, + "nodes": 1, + "real": 31790.53, + "sys": "geyser", + "user": 1118.61 + } + }, + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 5679.0, + "sys": "geyser" + } + }, + "ncr": { + "140226-171901": { + "cores": 4, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1623.0, + "sys": "caldera" + }, + "140312-202524": { + "TS": 1288.62, + "cores": 16, + "correct": "pass", + "metaTI": 14.2, + "metaTV": 116.77, + "metadata": true, + "nodes": 4, + "real": 1465.0, + "sys": "caldera" + } + }, + "pagoda": { + "??????-??????": { + "cores": 4, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 5513.0, + "sys": "caldera" + } + }, + "pynio": { + "140306-104238": { + "cores": 1, + "correct": "fail", + "kernel": 183.7, + "metadata": false, + "nodes": 1, + "real": 6890.64, + "sys": "geyser", + "user": 1043.99 + }, + "140307-094628": { + "TS": 4436.59, + "actual": 242880.0, + "cores": 1, + "correct": "fail", + "kernel": 311.76, + "metaTI": 422.84, + "metaTV": 260.98, + "metadata": true, + "nodes": 1, + "openi": 9.83, + "openo": 1.07, + "real": 5159.22, + "request": 192163.54, + "sys": "geyser", + "user": 1409.84 + } + }, + "pynio4_0": { + "140317-232141": { + "TS": 7754.38, + "actual": 352320.0, + "cores": 1, + "correct": "fail", + "kernel": 298.58, + "metaTI": 88.15, + "metaTV": 177.24, + "metadata": true, + "nodes": 1, + "openi": 9.75, + "openo": 0.17, + "real": 8032.64, + "request": 192163.54, + "sys": "geyser", + "user": 3312.31 + } + }, + "pyniompi": { + "140305-211128": { + "cores": 16, + "correct": "fail", + "kernel": 0.1, + "metadata": false, + "nodes": 4, + "real": 729.3, + "sys": "yellowstone", + "user": 0.23 + }, + "140313-155030": { + "TS": 1103.49, + "actual": 242880.0, + "cores": 16, + "correct": "fail", + "kernel": 0.13, + "metaTI": 7.21, + "metaTV": 9.13, + "metadata": true, + "nodes": 4, + "openi": 12.52, + "openo": 0.09, 
+ "real": 1141.67, + "request": 192163.54, + "sys": "yellowstone", + "user": 0.19 + } + }, + "pyniompi4_0": { + "140318-070023": { + "TS": 1087.52, + "actual": 352320.0, + "cores": 16, + "correct": "fail", + "kernel": 0.23, + "metaTI": 5.21, + "metaTV": 11.24, + "metadata": true, + "nodes": 4, + "openi": 9.47, + "openo": 0.09, + "real": 1122.78, + "request": 192163.54, + "sys": "yellowstone", + "user": 0.17 + } + }, + "pyniompi4_1": { + "140318-092050": { + "TS": 1487.0, + "actual": 352320.0, + "cores": 16, + "correct": "fail", + "kernel": 0.23, + "metaTI": 8.59, + "metaTV": 13.03, + "metadata": true, + "nodes": 4, + "openi": 9.47, + "openo": 0.26, + "real": 1525.21, + "request": 192163.54, + "sys": "yellowstone", + "user": 0.17 + } + }, + "pyreshaper": { + "140624-115036": { + "TS": 1115.6568172, + "actual": 352320.0, + "cores": 16, + "metaTI": 22.2814490795, + "metaTV": 52.5229613781, + "metadata": true, + "nodes": 4, + "openi": 23.7155740261, + "openo": 0.268895864487, + "real": 1213.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "140826-150055": { + "TS": 642.152941704, + "actual": 352320.0, + "cores": 16, + "metaTI": 8.12098765373, + "metaTV": 17.7993154526, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.1301939487, + "openo": 0.259157657623, + "real": 668.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "140902-143159": { + "TS": 792.752401829, + "actual": 352320.0, + "cores": 16, + "metaTI": 6.48322582245, + "metaTV": 19.5578858852, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 7.57899308205, + "openo": 0.230998516083, + "real": 813.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "140911-114206": { + "TS": 873.247316599, + "actual": 352320.0, + "cores": 16, + "metaTI": 11.7036771774, + "metaTV": 32.4409461021, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 11.7263197899, + "openo": 0.302983999252, + "real": 956.0, + "request": 192163.858337, + "sys": "yellowstone" + } + 
}, + "pyreshaper4": { + "140624-115036": { + "TS": 1145.50996685, + "actual": 352320.0, + "cores": 16, + "metaTI": 20.7704982758, + "metaTV": 45.629805088, + "metadata": true, + "nodes": 4, + "openi": 17.5859360695, + "openo": 0.32540512085, + "real": 1219.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "140826-150056": { + "TS": 644.412581205, + "actual": 352320.0, + "cores": 16, + "metaTI": 8.02404499054, + "metaTV": 16.4856157303, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.1310958862, + "openo": 0.290377378464, + "real": 669.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "140902-135750": { + "TS": 686.631240129, + "actual": 352320.0, + "cores": 16, + "metaTI": 8.02413105965, + "metaTV": 13.8782639503, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 10.1490437984, + "openo": 0.420978069305, + "real": 753.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "140911-115129": { + "TS": 915.880971432, + "actual": 352320.0, + "cores": 16, + "metaTI": 15.7554838657, + "metaTV": 37.2950565815, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 15.9514939785, + "openo": 0.687720775604, + "real": 1004.0, + "request": 192163.858337, + "sys": "yellowstone" + } + }, + "pyreshaper4c": { + "140624-115037": { + "TS": 1236.12181854, + "actual": 352320.0, + "cores": 16, + "metaTI": 11.7521572113, + "metaTV": 29.1930208206, + "metadata": true, + "nodes": 4, + "openi": 9.35915207863, + "openo": 0.2045814991, + "real": 1299.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "140826-150125": { + "TS": 644.170685768, + "actual": 352320.0, + "cores": 16, + "metaTI": 7.95990419388, + "metaTV": 11.3562419415, + "metadata": true, + "nodes": 4, + "openi": 10.1307721138, + "openo": 0.503438711166, + "real": 698.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "140902-111632": { + "TS": 1113.32402444, + "actual": 243840.0, + "cores": 16, + "metaTI": 1.50622916222, + "metaTV": 
2.53401875496, + "metadata": true, + "nodes": 4, + "once": true, + "openi": 7.68989086151, + "openo": 0.324621915817, + "real": 1163.0, + "request": 192163.547974, + "sys": "yellowstone" + }, + "140902-134311": { + "TS": 802.531996727, + "actual": 352320.0, + "cores": 16, + "metaTI": 9.37750196457, + "metaTV": 18.0378842354, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.04029607773, + "openo": 0.221019029617, + "real": 858.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "140902-212043": { + "TS": 1147.33637905, + "actual": 352320.0, + "cores": 16, + "metaTI": 16.6726238728, + "metaTV": 35.6920032501, + "metadata": true, + "nodes": 1, + "once": false, + "openi": 17.8658120632, + "openo": 1.45822024345, + "real": 1192.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "140911-115433": { + "TS": 1125.10803962, + "actual": 352320.0, + "cores": 16, + "metaTI": 15.5659887791, + "metaTV": 24.7654159069, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 15.9480159283, + "openo": 0.308713436127, + "real": 1188.0, + "request": 192163.858337, + "sys": "yellowstone" + }, + "150918-141526": { + "TS": 1033.53211641, + "actual": 352320.0, + "cores": 16, + "metaTI": 13.9353010654, + "metaTV": 26.8450388908, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 9.45551896095, + "openo": 0.459881305695, + "real": 1052.35365105, + "request": 192163.858337, + "sys": "yellowstone" + }, + "151002-094524": { + "TS": 341.83474493, + "actual": 381048.0, + "cores": 16, + "metaTI": 4.18152046204, + "metaTV": 1.73612046242, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 14.1330201626, + "openo": 0.300295114517, + "real": 908.396248817, + "request": 194248.586632, + "sys": "yellowstone" + }, + "151002-121349": { + "TSr": 732.107361078, + "TSw": 340.210021734, + "actual": 381048.0, + "cores": 16, + "metaTIr": 4.61803150177, + "metaTIw": 4.13230228424, + "metaTVr": 16.138954401, + "metaTVw": 1.73543787003, + "metadata": 
true, + "nodes": 4, + "once": false, + "openi": 10.0871970654, + "openo": 0.285751581192, + "real": 1015.06130505, + "request": 194248.586632, + "sys": "yellowstone" + }, + "151002-145908": { + "TSr": 727.685482979, + "TSw": 344.882488966, + "actual": 381048.0, + "cores": 16, + "metaTIr": 4.81376791, + "metaTIw": 5.70712137222, + "metaTVr": 16.7337415218, + "metaTVw": 3.95496964455, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 12.9056642056, + "openo": 0.517478704453, + "real": 991.616967916, + "request": 194248.586632, + "sys": "yellowstone" + }, + "151005-134536": { + "TSr": 347.185287476, + "TSw": 341.016569376, + "actual": 381048.0, + "cores": 16, + "metaTIr": 7.16235041618, + "metaTIw": 4.25477218628, + "metaTVr": 0.17556476593, + "metaTVw": 1.78972244263, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 51.1564006805, + "openo": 0.26961684227, + "real": 678.237536907, + "request": 194248.586632, + "sys": "yellowstone" + }, + "151005-141834": { + "TSr": 394.218106985, + "TSw": 342.007935047, + "actual": 381048.0, + "cores": 16, + "metaTIr": 7.91029167175, + "metaTIw": 4.11476492882, + "metaTVr": 0.178674936295, + "metaTVw": 1.76582336426, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 59.2391593456, + "openo": 0.231271266937, + "real": 719.421389103, + "request": 194248.586632, + "sys": "yellowstone" + } + } + }, + "startYear": "0001" + }, + "POP-DAILY-1.0": { + "baseline": "/glade/u/tdd/asap/bakeoff/tseries/pop-daily-1.0", + "endYear": "0010", + "input": "/glade/u/tdd/asap/bakeoff/hist/pop-daily-1.0", + "isize": 6859.776, + "n2dVars": 0, + "n3dVars": 4, + "nVars": 4, + "osize": 6963.2, + "results": { + "pyreshaper": { + "140911-111816": { + "TS": 202.800698996, + "actual": 350304.0, + "cores": 16, + "metaTI": 0.725324869156, + "metaTV": 82.8588526249, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 83.4460978508, + "openo": 0.0272371768951, + "real": 398.0, + "request": 208227.04184, + "sys": 
"yellowstone" + }, + "140911-114043": { + "TS": 384.075725555, + "actual": 350304.0, + "cores": 16, + "metaTI": 1.36809396744, + "metaTV": 219.417865038, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 130.947030067, + "openo": 0.145795822144, + "real": 745.0, + "request": 208227.04184, + "sys": "yellowstone" + } + }, + "pyreshaper4": { + "140911-111815": { + "TS": 213.01793766, + "actual": 350304.0, + "cores": 16, + "metaTI": 0.641710042953, + "metaTV": 73.404512167, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 66.3904781342, + "openo": 0.183240890503, + "real": 376.0, + "request": 208227.04184, + "sys": "yellowstone" + }, + "140911-114834": { + "TS": 334.502142906, + "actual": 350304.0, + "cores": 16, + "metaTI": 2.75354886055, + "metaTV": 189.406442404, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 286.188698053, + "openo": 0.625742197037, + "real": 829.0, + "request": 208227.04184, + "sys": "yellowstone" + } + }, + "pyreshaper4c": { + "140911-111815": { + "TS": 241.960571527, + "actual": 350304.0, + "cores": 16, + "metaTI": 0.735607147217, + "metaTV": 43.8486521244, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 65.7884709835, + "openo": 0.218874931335, + "real": 376.0, + "request": 208227.04184, + "sys": "yellowstone" + }, + "140911-114804": { + "TS": 364.382410526, + "actual": 350304.0, + "cores": 16, + "metaTI": 3.3417840004, + "metaTV": 155.952032328, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 286.188265085, + "openo": 0.610445976257, + "real": 799.0, + "request": 208227.04184, + "sys": "yellowstone" + }, + "150918-140809": { + "TS": 344.406491041, + "actual": 315360.0, + "cores": 16, + "metaTI": 2.29843902588, + "metaTV": 142.575973034, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 165.554438829, + "openo": 0.451182126999, + "real": 484.280472994, + "request": 187507.280182, + "sys": "yellowstone" + }, + "151002-094206": { + "TS": 44.544624567, + "actual": 
158608.0, + "cores": 16, + "metaTI": 0.416500329971, + "metaTV": 4.26540613174, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 125.451020956, + "openo": 0.415581941605, + "real": 293.116672993, + "request": 6232.80738831, + "sys": "yellowstone" + }, + "151002-120613": { + "TSr": 288.929755926, + "TSw": 45.5107212067, + "actual": 158608.0, + "cores": 16, + "metaTIr": 1.99480342865, + "metaTIw": 0.461429834366, + "metaTVr": 125.482697725, + "metaTVw": 4.67854905128, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 208.385211945, + "openo": 0.543105840683, + "real": 341.90487504, + "request": 6232.80738831, + "sys": "yellowstone" + }, + "151002-145228": { + "TSr": 222.422555923, + "TSw": 42.7405705452, + "actual": 11296.0, + "cores": 16, + "metaTIr": 1.67179584503, + "metaTIw": 0.42426276207, + "metaTVr": 131.151450157, + "metaTVw": 1.62547826767, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 210.754734993, + "openo": 0.479517936707, + "real": 392.414848089, + "request": 6232.80738831, + "sys": "yellowstone" + }, + "151005-134516": { + "TSr": 272.211297989, + "TSw": 40.9105670452, + "actual": 11296.0, + "cores": 16, + "metaTIr": 2.31458997726, + "metaTIw": 0.411843299866, + "metaTVr": 0.16576385498, + "metaTVw": 0.361858606339, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 166.480087757, + "openo": 0.669489860535, + "real": 479.004032135, + "request": 6232.80738831, + "sys": "yellowstone" + }, + "151005-141854": { + "TSr": 248.436975718, + "TSw": 43.21550107, + "actual": 158608.0, + "cores": 16, + "metaTIr": 2.96689176559, + "metaTIw": 0.44668006897, + "metaTVr": 41.7858102322, + "metaTVw": 4.16977286339, + "metadata": true, + "nodes": 4, + "once": false, + "openi": 169.89680624, + "openo": 0.873800039291, + "real": 467.221978903, + "request": 6232.80738831, + "sys": "yellowstone" + } + } + }, + "startYear": "0001" + } +} \ No newline at end of file diff --git a/tests/yellowstone/timings.json 
b/tests/yellowstone/timings.json index 513b3b0d..082b0b42 100644 --- a/tests/yellowstone/timings.json +++ b/tests/yellowstone/timings.json @@ -1,36 +1,14 @@ { "CAMFV-1.0": { - "baseline": "/glade/u/tdd/asap/bakeoff/tseries/camfv-1.0", + "baseline": "/glade/p/tdd/asap/bakeoff/tseries/camfv-1.0", "endYear": "1859", - "input": "/glade/u/tdd/asap/bakeoff/hist/camfv-1.0", + "input": "/glade/p/tdd/asap/bakeoff/hist/camfv-1.0", "isize": 28447.744, "n2dVars": 82, "n3dVars": 40, "nVars": 122, "osize": 28525.4, "results": { - "cdo": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 1940.0, - "sys": "caldera" - } - }, - "ncl": { - "140226-105745": { - "cores": 1, - "correct": "fail", - "kernel": 120.93, - "metadata": false, - "nodes": 1, - "real": 1767.76, - "sys": "geyser", - "user": 153.4 - } - }, "nco": { "??????-??????": { "cores": 1, @@ -41,538 +19,63 @@ "sys": "geyser" } }, - "ncr": { - "140226-173803": { - "cores": 4, - "correct": "fail", - "metadata": true, - "nodes": 1, - "real": 3353.0, - "sys": "caldera" - }, - "140303-175041": { - "cores": 4, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 3251.0, - "sys": "caldera" - }, - "140312-182909": { - "TS": 732.7, - "cores": 16, - "correct": "pass", - "metaTI": 1.27, - "metaTV": 164.81, - "metadata": true, - "nodes": 4, - "real": 936.0, - "sys": "caldera" - } - }, - "pagoda": { - "??????-??????": { - "cores": 4, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 1236.0, - "sys": "caldera" - } - }, - "pynio": { - "140305-193740": { - "cores": 1, - "correct": "fail", - "kernel": 32.6, - "metadata": false, - "nodes": 1, - "real": 1523.02, - "sys": "geyser", - "user": 112.72 - }, - "140307-113151": { - "TS": 1463.81, - "actual": 84000.0, - "cores": 1, - "correct": false, - "kernel": 31.82, - "metaTI": 6.29, - "metaTV": 50.86, - "metadata": false, - "nodes": 1, - "openi": 6.65, - "openo": 0.78, - "real": 1542.99, - "request": 28447.83, - "sys": 
"geyser", - "user": 114.6 - }, - "140307-120707": { - "TS": 1824.31, - "actual": 77760.0, - "cores": 1, - "correct": false, - "kernel": 46.61, - "metaTI": 7.74, - "metaTV": 82.0, - "metadata": true, - "nodes": 1, - "openi": 6.19, - "openo": 0.44, - "real": 1933.83, - "request": 28447.83, - "sys": "geyser", - "user": 141.55 - } - }, - "pynio4_0": { - "140317-232104": { - "TS": 1900.95, - "actual": 956160.0, - "cores": 1, - "correct": false, - "kernel": 64.18, - "metaTI": 2.7, - "metaTV": 161.9, - "metadata": true, - "nodes": 1, - "openi": 6.46, - "openo": 0.26, - "real": 2077.05, - "request": 28447.83, - "sys": "geyser", - "user": 498.48 - } - }, - "pynio4_1": { - "140318-114642": { - "TS": 2706.45, - "actual": 956160.0, - "cores": 1, - "correct": false, - "kernel": 62.85, - "metaTI": 3.59, - "metaTV": 291.47, - "metadata": true, - "nodes": 1, - "openi": 7.08, - "openo": 0.91, - "real": 3020.34, - "request": 28447.83, - "sys": "geyser", - "user": 1174.47 - } - }, - "pyniompi": { - "140305-204127": { - "cores": 16, - "correct": "fail", - "kernel": 0.26, - "metadata": false, - "nodes": 4, - "real": 243.41, - "sys": "yellowstone", - "user": 0.22 - }, - "140310-164122": { - "TS": 197.8, - "actual": 77760.0, - "cores": 16, - "correct": false, - "kernel": 0.09, - "metaTI": 0.49, - "metaTV": 5.04, - "metadata": true, - "nodes": 4, - "openi": 5.71, - "openo": 0.14, - "real": 211.66, - "request": 28447.83, - "sys": "yellowstone", - "user": 0.18 - }, - "140313-154918": { - "TS": 248.07, - "actual": 77760.0, - "cores": 16, - "correct": false, - "kernel": 0.09, - "metaTI": 0.23, - "metaTV": 8.91, - "metadata": true, - "nodes": 4, - "openi": 5.66, - "openo": 0.14, - "real": 268.67, - "request": 28447.83, - "sys": "yellowstone", - "user": 0.18 - } - }, - "pyniompi4_0": { - "140317-234936": { - "TS": 182.06, - "actual": 956160.0, - "cores": 16, - "correct": false, - "kernel": 0.09, - "metaTI": 0.19, - "metaTV": 10.64, - "metadata": true, - "nodes": 4, - "openi": 5.84, - "openo": 
0.27, - "real": 207.49, - "request": 28447.83, - "sys": "yellowstone", - "user": 0.18 - } - }, - "pyniompi4_1": { - "140318-092050": { - "TS": 337.4, - "actual": 956160.0, - "cores": 16, - "correct": false, - "kernel": 0.14, - "metaTI": 0.22, - "metaTV": 12.79, - "metadata": true, - "nodes": 4, - "openi": 5.77, - "openo": 0.22, - "real": 362.0, - "request": 28447.83, - "sys": "yellowstone", - "user": 0.16 - } - }, - "pyreshaper": { - "140624-113717": { - "TS": 469.639892101, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.462065219879, - "metaTV": 34.4386823177, - "metadata": true, - "nodes": 4, - "openi": 7.20728111267, - "openo": 0.362703323364, - "real": 499.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140826-145444": { - "TS": 244.512058496, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.462116003036, - "metaTV": 16.0183949471, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 4.00277686119, - "openo": 0.437393665314, - "real": 297.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140902-142335": { - "TS": 258.899525881, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.371918916702, - "metaTV": 23.6432528496, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 3.8312189579, - "openo": 0.474730968475, - "real": 309.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140911-110120": { - "TS": 169.407341719, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.230446100235, - "metaTV": 7.24861335754, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 4.6857380867, - "openo": 0.27854514122, - "real": 221.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140911-112751": { - "TS": 189.411576748, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.28161406517, - "metaTV": 11.0464272499, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 4.41655302048, - "openo": 0.388634204865, - "real": 243.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140915-153852": 
{ - "TS": 139.93287158, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.356719017029, - "metaTV": 7.22233319283, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 3.86684203148, - "openo": 0.50390124321, - "real": 192.0, - "request": 28449.3301392, - "sys": "yellowstone" - } - }, - "pyreshaper4": { - "140624-113934": { - "TS": 481.015773535, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.442744970322, - "metaTV": 34.7662909031, - "metadata": true, - "nodes": 4, - "openi": 10.6994099617, - "openo": 0.54573059082, - "real": 557.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140826-145413": { - "TS": 240.958698273, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.340924263, - "metaTV": 18.6606128216, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 4.05443310738, - "openo": 0.380392551422, - "real": 266.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140902-134715": { - "TS": 185.440463305, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.230117082596, - "metaTV": 15.0816841125, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 4.1193420887, - "openo": 0.602751970291, - "real": 242.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140911-114028": { - "TS": 357.623367548, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.470070123672, - "metaTV": 30.8412137032, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 7.0916261673, - "openo": 0.382592201233, - "real": 436.0, - "request": 28449.3301392, - "sys": "yellowstone" - } - }, "pyreshaper4c": { - "140624-113836": { - "TS": 502.167779446, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.599811792374, - "metaTV": 34.622944355, - "metadata": true, - "nodes": 4, - "openi": 6.6991379261, - "openo": 0.377572059631, - "real": 567.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140826-145444": { - "TS": 239.348858118, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.299512863159, - "metaTV": 15.5699160099, - 
"metadata": true, - "nodes": 4, - "openi": 4.13505911827, - "openo": 0.544854164124, - "real": 297.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140902-110236": { - "TS": 279.305563927, - "actual": 84960.0, - "cores": 16, - "metaTI": 0.154568433762, - "metaTV": 4.60655140877, - "metadata": true, - "nodes": 4, - "once": true, - "openi": 5.66544604301, - "openo": 0.297202825546, - "real": 327.0, - "request": 28447.8346252, - "sys": "yellowstone" - }, - "140902-133350": { - "TS": 245.287409306, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.318746089935, - "metaTV": 18.6890969276, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 6.27275109291, - "openo": 0.33965086937, - "real": 309.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140902-210623": { - "TS": 300.253802061, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.677854299545, - "metaTV": 23.3515269756, - "metadata": true, - "nodes": 1, - "once": false, - "openi": 4.75587201118, - "openo": 0.537309408188, - "real": 332.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "140911-114232": { - "TS": 401.958108187, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.509034633636, - "metaTV": 27.5770299435, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.226307869, - "openo": 0.364979028702, - "real": 467.0, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "150918-140320": { - "TS": 310.094833136, - "actual": 956160.0, - "cores": 16, - "metaTI": 0.48850774765, - "metaTV": 36.2081618309, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 4.52633786201, - "openo": 0.211630821228, - "real": 329.733877182, - "request": 28449.3301392, - "sys": "yellowstone" - }, - "151002-091959": { - "TS": 60.2626390457, - "actual": 968848.0, - "cores": 16, - "metaTI": 0.261739492416, - "metaTV": 10.9838540554, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 5.89429616928, - "openo": 0.257526159286, - "real": 190.701400042, - 
"request": 28450.9692535, - "sys": "yellowstone" - }, - "151002-120211": { - "TSr": 267.074586868, - "TSw": 60.4137539864, + "170103-122156": { + "TSr": 71.2765235901, + "TSw": 62.2601282597, "actual": 968848.0, "cores": 16, - "metaTIr": 0.212244749069, - "metaTIw": 0.262345075607, - "metaTVr": 13.3320331573, - "metaTVw": 11.3042991161, + "metaTIr": 0.0619974136353, + "metaTIw": 0.0665917396545, + "metaTVr": 4.64656043053, + "metaTVw": 5.81531739235, "metadata": true, - "nodes": 4, + "nodes": 1, "once": false, - "openi": 4.30114293098, - "openo": 0.335148334503, - "real": 324.800256014, + "openi": 50.3982241154, + "openo": 0.592965602875, + "real": 214.892199993, "request": 28450.9692535, "sys": "yellowstone" }, - "151002-144720": { - "TSr": 267.789553165, - "TSw": 62.3434591293, + "170104-100716": { + "TSr": 67.4400093555, + "TSw": 61.8582854271, "actual": 968848.0, "cores": 16, - "metaTIr": 0.148299455643, - "metaTIw": 0.26377415657, - "metaTVr": 15.7539906502, - "metaTVw": 10.9335258007, + "metaTIr": 0.0617175102234, + "metaTIw": 0.0665860176086, + "metaTVr": 4.61358785629, + "metaTVw": 5.86532330513, "metadata": true, "nodes": 4, "once": false, - "openi": 4.0840959549, - "openo": 0.172574281693, - "real": 323.57800293, + "openi": 52.648058176, + "openo": 0.173426866531, + "real": 212.642313004, "request": 28450.9692535, "sys": "yellowstone" }, - "151005-133333": { - "TSr": 124.339257717, - "TSw": 61.037047863, + "170105-153323": { + "TSr": 12.2236976624, + "TSw": 23.3613986969, "actual": 968848.0, - "cores": 16, - "metaTIr": 0.0115587711334, - "metaTIw": 0.292886972427, - "metaTVr": 1.04787969589, - "metaTVw": 11.0901486874, + "cores": 128, + "metaTIr": 0.00712752342224, + "metaTIw": 0.0605683326721, + "metaTVr": 0.371065378189, + "metaTVw": 0.469061136246, "metadata": true, - "nodes": 4, + "nodes": 8, "once": false, - "openi": 20.3761889935, - "openo": 0.308133363724, - "real": 205.250231981, + "openi": 4.39358782768, + "openo": 0.532613039017, + "real": 
45.0535690784, "request": 28450.9692535, "sys": "yellowstone" - }, + } + }, + "pyreshaper4c-v0": { "151005-141124": { "TSr": 251.438382626, "TSw": 60.7321989536, @@ -597,40 +100,7 @@ }, "CAMHF-6HOURLY-1.0": { "results": { - "pyreshaper4c": { - "151002-105208": { - "TS": 3164.16029859, - "actual": 16880504.0, - "cores": 16, - "metaTI": 0.070193529129, - "metaTV": 302.568546534, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 224.951442003, - "openo": 0.540024995804, - "real": 4717.14768195, - "request": 412733.208378, - "sys": "yellowstone" - }, - "151002-132743": { - "TSr": 2102.44257331, - "TSw": 3221.23370409, - "actual": 16880504.0, - "cores": 16, - "metaTIr": 1.16329169273, - "metaTIw": 0.0410597324371, - "metaTVr": 578.328157187, - "metaTVw": 324.140898943, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 319.565881968, - "openo": 0.560957193375, - "real": 5139.13690186, - "request": 412733.208378, - "sys": "yellowstone" - }, + "pyreshaper4c-v0": { "151005-153203": { "TSr": 1839.39458537, "TSw": 3156.90317488, @@ -654,76 +124,7 @@ }, "CAMHF-DAILY-1.0": { "results": { - "pyreshaper4c": { - "151002-094928": { - "TS": 106.482156515, - "actual": 11395904.0, - "cores": 16, - "metaTI": 0.0740783214569, - "metaTV": 146.638347387, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 45.2218770981, - "openo": 0.418075799942, - "real": 1020.13037086, - "request": 39295.5901566, - "sys": "yellowstone" - }, - "151002-121835": { - "TSr": 718.967712879, - "TSw": 109.465676069, - "actual": 11395904.0, - "cores": 16, - "metaTIr": 3.48129415512, - "metaTIw": 0.316914319992, - "metaTVr": 327.981246948, - "metaTVw": 163.808205605, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 51.1060910225, - "openo": 0.799788951874, - "real": 1241.54569411, - "request": 39295.5901566, - "sys": "yellowstone" - }, - "151002-145049": { - "TSr": 240.105856419, - "TSw": 94.4298458099, - "actual": 77864.0, - "cores": 16, - "metaTIr": 3.07635235786, 
- "metaTIw": 0.10428237915, - "metaTVr": 244.117375135, - "metaTVw": 7.25943517685, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 37.4065859318, - "openo": 0.587407112122, - "real": 469.319635868, - "request": 39295.5901566, - "sys": "yellowstone" - }, - "151005-134305": { - "TSr": 258.39025259, - "TSw": 91.9618022442, - "actual": 77864.0, - "cores": 16, - "metaTIr": 2.48077702522, - "metaTIw": 0.102338075638, - "metaTVr": 67.0126914978, - "metaTVw": 6.04324865341, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 136.388954163, - "openo": 0.433568954468, - "real": 479.639178038, - "request": 39295.5901566, - "sys": "yellowstone" - }, + "pyreshaper4c-v0": { "151005-142821": { "TSr": 725.420089722, "TSw": 107.214648724, @@ -747,91 +148,22 @@ }, "CAMHF-MONTHLY-1.0": { "results": { - "pyreshaper4c": { - "151002-093524": { - "TS": 84.0484206676, - "actual": 1023964.0, - "cores": 16, - "metaTI": 0.197278022766, - "metaTV": 8.23391604424, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 19.332545042, - "openo": 0.158255577087, - "real": 325.792993069, - "request": 41617.1186371, - "sys": "yellowstone" - }, - "151002-120324": { - "TSr": 303.185502052, - "TSw": 84.3467159271, + "pyreshaper4c-v0": { + "151005-141354": { + "TSr": 239.311976194, + "TSw": 84.3950455189, "actual": 1023964.0, "cores": 16, - "metaTIr": 2.37086200714, - "metaTIw": 0.221300363541, - "metaTVr": 10.6845979691, - "metaTVw": 8.21553111076, + "metaTIr": 3.44647073746, + "metaTIw": 0.237613201141, + "metaTVr": 2.22094035149, + "metaTVw": 8.04119968414, "metadata": true, "nodes": 4, "once": false, - "openi": 24.5326368809, - "openo": 0.497053861618, - "real": 375.913740158, - "request": 41617.1186371, - "sys": "yellowstone" - }, - "151002-144915": { - "TSr": 300.091548204, - "TSw": 86.8913040161, - "actual": 1023964.0, - "cores": 16, - "metaTIr": 3.63008785248, - "metaTIw": 0.239748716354, - "metaTVr": 10.3928160667, - "metaTVw": 9.76353907585, - "metadata": true, 
- "nodes": 4, - "once": false, - "openi": 26.914000988, - "openo": 0.475345373154, - "real": 384.981215954, - "request": 41617.1186371, - "sys": "yellowstone" - }, - "151005-133628": { - "TSr": 136.590059996, - "TSw": 84.718130827, - "actual": 1023964.0, - "cores": 16, - "metaTIr": 1.79091453552, - "metaTIw": 0.195056438446, - "metaTVr": 1.93241810799, - "metaTVw": 8.07740068436, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 75.4882974625, - "openo": 0.338253259659, - "real": 272.33850503, - "request": 41617.1186371, - "sys": "yellowstone" - }, - "151005-141354": { - "TSr": 239.311976194, - "TSw": 84.3950455189, - "actual": 1023964.0, - "cores": 16, - "metaTIr": 3.44647073746, - "metaTIw": 0.237613201141, - "metaTVr": 2.22094035149, - "metaTVw": 8.04119968414, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 118.201842785, - "openo": 0.282948732376, - "real": 418.47419095, + "openi": 118.201842785, + "openo": 0.282948732376, + "real": 418.47419095, "request": 41617.1186371, "sys": "yellowstone" } @@ -839,36 +171,15 @@ } }, "CAMSE-0.25": { - "baseline": "/glade/u/tdd/asap/bakeoff/tseries/camse-0.25", + "baseline": "/glade/p/tdd/asap/bakeoff/tseries/camse-0.25", "endYear": "0010", - "input": "/glade/u/tdd/asap/bakeoff/hist/camse-0.25", + "input": "/glade/p/tdd/asap/bakeoff/hist/camse-0.25", "isize": 1074790.4, "n2dVars": 101, "n3dVars": 97, "nVars": 198, "osize": 1078586.3, "results": { - "cdo": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 35221.0, - "sys": "geyser" - } - }, - "ncl": { - "140228-145958": { - "cores": 1, - "kernel": 1679.35, - "metadata": false, - "nodes": 1, - "real": 25973.02, - "sys": "geyser", - "user": 4582.95 - } - }, "nco": { "??????-??????": { "cores": 1, @@ -879,4355 +190,753 @@ "sys": "geyser" } }, - "ncr": { - "140303-130853": { - "cores": 16, - "correct": "pass", - "metadata": true, - "nodes": 4, - "real": 11597.0, - "sys": "caldera" - }, - 
"140312-193053": { - "TS": 3243.49, - "cores": 16, - "correct": "pass", - "metaTI": 21.05, - "metaTV": 468.2, - "metadata": true, - "nodes": 4, - "real": 3693.0, - "sys": "caldera" - } - }, - "pagoda": { - "??????-??????": { + "pyreshaper4c": { + "170103-132753": { + "TSr": 489.913909197, + "TSw": 1915.72762108, + "actual": 2561592.0, "cores": 16, - "correct": "pass", - "metadata": true, - "nodes": 4, - "real": 20403.0, - "sys": "caldera" - } - }, - "pynio": { - "140306-094908": { - "cores": 1, - "correct": "fail", - "kernel": 1203.54, - "metadata": false, - "nodes": 1, - "real": 8277.37, - "sys": "geyser", - "user": 3875.32 - }, - "140313-160743": { - "TS": 11855.1, - "actual": 2544960.0, - "cores": 1, - "correct": "fail", - "kernel": 2012.42, - "metaTI": 48.72, - "metaTV": 474.69, + "metaTIr": 1.50844454765, + "metaTIw": 3.52509474754, + "metaTVr": 41.6182866096, + "metaTVw": 5.93010139465, "metadata": true, "nodes": 1, - "openi": 9.67, - "openo": 0.87, - "real": 12393.05, - "request": 1074992.99, - "sys": "geyser", - "user": 5717.35 - } - }, - "pyniompi": { - "140305-204455": { - "cores": 16, - "correct": "fail", - "kernel": 0.13, - "metadata": false, - "nodes": 4, - "real": 830.18, - "sys": "yellowstone", - "user": 0.23 - }, - "140313-160528": { - "TS": 1161.84, - "actual": 1119360.0, - "cores": 16, - "correct": "fail", - "kernel": 0.14, - "metaTI": 2.97, - "metaTV": 21.81, - "metadata": true, - "nodes": 4, - "openi": 11.31, - "openo": 0.09, - "real": 1204.44, - "request": 1074992.99, - "sys": "yellowstone", - "user": 0.24 - } - }, - "pyniompi4_0": { - "140318-070023": { - "TS": 1637.77, - "actual": 2544960.0, - "cores": 16, - "correct": "fail", - "kernel": 0.14, - "metaTI": 3.62, - "metaTV": 26.27, - "metadata": true, - "nodes": 4, - "openi": 10.9, - "openo": 0.3, - "real": 1688.02, - "request": 1074992.99, - "sys": "yellowstone", - "user": 0.24 - } - }, - "pyreshaper": { - "140624-115354": { - "TS": 1400.56879449, - "actual": 2544960.0, - "cores": 16, - 
"metaTI": 2.51281142235, - "metaTV": 28.7670600414, - "metadata": true, - "nodes": 4, - "openi": 11.0115630627, - "openo": 0.804432153702, - "real": 1485.0, - "request": 1074995.44647, + "once": false, + "openi": 53.0961806774, + "openo": 0.581104278564, + "real": 2540.33662319, + "request": 1078519.71414, "sys": "yellowstone" }, - "140826-185109": { - "TS": 1249.34267521, - "actual": 2544960.0, + "170104-105208": { + "TSr": 368.222280264, + "TSw": 1911.50331664, + "actual": 2561592.0, "cores": 16, - "metaTI": 2.66328287125, - "metaTV": 23.6536338329, + "metaTIr": 1.40527367592, + "metaTIw": 3.70949554443, + "metaTVr": 42.0364191532, + "metaTVw": 5.73445129395, "metadata": true, "nodes": 4, "once": false, - "openi": 10.302508831, - "openo": 1.21547722816, - "real": 1328.0, - "request": 1074995.44647, + "openi": 64.8759138584, + "openo": 0.112359046936, + "real": 2410.84378195, + "request": 1078519.71414, "sys": "yellowstone" }, - "140902-144045": { - "TS": 1260.70107675, - "actual": 2544960.0, - "cores": 16, - "metaTI": 2.78524708748, - "metaTV": 28.7220621109, + "170105-154108": { + "TSr": 69.4597160816, + "TSw": 365.629155874, + "actual": 2561592.0, + "cores": 128, + "metaTIr": 0.417159080505, + "metaTIw": 1.22160601616, + "metaTVr": 12.2830090523, + "metaTVw": 1.82569479942, "metadata": true, - "nodes": 4, + "nodes": 8, "once": false, - "openi": 9.0819709301, - "openo": 0.501886844635, - "real": 1339.0, - "request": 1074995.44647, + "openi": 17.1788246632, + "openo": 0.55075097084, + "real": 446.141501904, + "request": 1078519.71414, "sys": "yellowstone" - }, - "140911-114308": { - "TS": 1073.58587337, - "actual": 2544960.0, + } + }, + "pyreshaper4c-v0": { + "151005-144227": { + "TSr": 273.461855412, + "TSw": 1828.38464713, + "actual": 2561592.0, "cores": 16, - "metaTI": 2.69405317307, - "metaTV": 28.7266523838, + "metaTIr": 0.79497218132, + "metaTIw": 6.30753397942, + "metaTVr": 1.44262719154, + "metaTVw": 10.0376775265, "metadata": true, "nodes": 4, "once": 
false, - "openi": 10.8840389252, - "openo": 0.400128126144, - "real": 1160.0, - "request": 1074995.44647, + "openi": 86.030144453, + "openo": 0.728790521622, + "real": 2194.46634698, + "request": 1078519.71414, "sys": "yellowstone" } + } + }, + "startYear": "0001" + }, + "CAMSE-1.0": { + "baseline": "/glade/p/tdd/asap/bakeoff/tseries/camse-1.0", + "endYear": "0010", + "input": "/glade/p/tdd/asap/bakeoff/hist/camse-1.0", + "isize": 30681.088, + "n2dVars": 89, + "n3dVars": 43, + "nVars": 132, + "osize": 30848.5, + "results": { + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1675.0, + "sys": "geyser" + } }, - "pyreshaper4": { - "140624-120120": { - "TS": 1781.45158434, - "actual": 2544960.0, + "pyreshaper4c": { + "170103-122314": { + "TSr": 103.217097282, + "TSw": 57.6854994297, + "actual": 1043904.0, "cores": 16, - "metaTI": 3.85545730591, - "metaTV": 25.5409047604, + "metaTIr": 0.131679296494, + "metaTIw": 0.106369972229, + "metaTVr": 7.13957571983, + "metaTVw": 8.63095545769, "metadata": true, - "nodes": 4, - "openi": 8.35853886604, - "openo": 0.244127988815, - "real": 1863.0, - "request": 1074995.44647, + "nodes": 1, + "once": false, + "openi": 75.2417881489, + "openo": 0.863926410675, + "real": 293.722089052, + "request": 30828.9682617, "sys": "yellowstone" }, - "140827-151200": { - "TS": 1299.53353667, - "actual": 2544960.0, + "170104-100834": { + "TSr": 101.295251608, + "TSw": 57.6846690178, + "actual": 1043904.0, "cores": 16, - "metaTI": 4.34471225739, - "metaTV": 20.2178757191, + "metaTIr": 0.132795810699, + "metaTIw": 0.108872890472, + "metaTVr": 7.15904402733, + "metaTVw": 8.73755931854, "metadata": true, "nodes": 4, "once": false, - "openi": 10.7356190681, - "openo": 0.502675294876, - "real": 1378.0, - "request": 1074995.44647, + "openi": 76.6875386238, + "openo": 0.288424253464, + "real": 290.038249969, + "request": 30828.9682617, "sys": "yellowstone" }, - "140902-140613": { - "TS": 
1306.83785939, - "actual": 2544960.0, - "cores": 16, - "metaTI": 3.65520238876, - "metaTV": 19.6583509445, + "170105-153322": { + "TSr": 11.5425364971, + "TSw": 23.3573212624, + "actual": 1043904.0, + "cores": 128, + "metaTIr": 0.0133945941925, + "metaTIw": 0.0474498271942, + "metaTVr": 0.720979690552, + "metaTVw": 0.884786367416, "metadata": true, - "nodes": 4, + "nodes": 8, "once": false, - "openi": 8.57616996765, - "openo": 0.55352139473, - "real": 1380.0, - "request": 1074995.44647, + "openi": 4.7198984623, + "openo": 0.576486110687, + "real": 43.5825870037, + "request": 30828.9682617, "sys": "yellowstone" - }, - "140911-115857": { - "TS": 1406.00577044, - "actual": 2544960.0, + } + }, + "pyreshaper4c-v0": { + "151005-141258": { + "TSr": 302.199320793, + "TSw": 65.4644916058, + "actual": 1043904.0, "cores": 16, - "metaTI": 4.1827378273, - "metaTV": 24.5323703289, + "metaTIr": 0.102242946625, + "metaTIw": 0.530309677124, + "metaTVr": 2.07044291496, + "metaTVw": 16.0213608742, "metadata": true, "nodes": 4, "once": false, - "openi": 7.99878907204, - "openo": 0.498881101608, - "real": 1486.0, - "request": 1074995.44647, + "openi": 63.9010326862, + "openo": 0.194200992584, + "real": 430.064186811, + "request": 30828.9682617, "sys": "yellowstone" } + } + }, + "startYear": "0001" + }, + "CICE-0.1": { + "baseline": "/glade/p/tdd/asap/bakeoff/tseries/cice-0.1", + "endYear": "0010", + "input": "/glade/p/tdd/asap/bakeoff/hist/cice-0.1", + "isize": 444023.808, + "n2dVars": 112, + "nVars": 112, + "osize": 42112.4, + "results": { + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 8909.0, + "sys": "geyser" + } }, "pyreshaper4c": { - "140624-154149": { - "TS": 3583.59778595, - "actual": 2544960.0, - "cores": 16, - "metaTI": 8.08878302574, - "metaTV": 30.8035469055, - "metadata": true, - "nodes": 4, - "openi": 10.7068860531, - "openo": 0.415813922882, - "real": 3675.0, - "request": 1074995.44647, - "sys": "yellowstone" 
- }, - "140827-153424": { - "TS": 2636.91293931, - "actual": 2544960.0, + "170103-125806": { + "TSr": 223.591905355, + "TSw": 377.999773741, + "actual": 715008.0, "cores": 16, - "metaTI": 7.7899210453, - "metaTV": 25.9457168579, + "metaTIr": 26.0964262486, + "metaTIw": 66.0177185535, + "metaTVr": 21.7866501808, + "metaTVw": 0.579627990723, "metadata": true, - "nodes": 4, - "openi": 10.7509250641, - "openo": 0.241142034531, - "real": 2722.0, - "request": 1074995.44647, + "nodes": 1, + "once": false, + "openi": 27.4906201363, + "openo": 0.555342674255, + "real": 752.66496706, + "request": 561093.903809, "sys": "yellowstone" }, - "140902-114116": { - "TS": 2593.2846427, - "actual": 1126560.0, + "170104-102328": { + "TSr": 188.637108326, + "TSw": 373.932635546, + "actual": 715008.0, "cores": 16, - "metaTI": 0.603957891464, - "metaTV": 1.47066664696, + "metaTIr": 32.9010183811, + "metaTIw": 64.7088155746, + "metaTVr": 19.2243871689, + "metaTVw": 0.522397994995, "metadata": true, "nodes": 4, - "once": true, - "openi": 10.3560228348, - "openo": 0.474064826965, - "real": 2647.0, - "request": 1074993.01163, + "once": false, + "openi": 33.5795981884, + "openo": 0.128201246262, + "real": 720.028633118, + "request": 561093.903809, "sys": "yellowstone" }, - "140902-141416": { - "TS": 2652.37349391, - "actual": 2544960.0, - "cores": 16, - "metaTI": 8.0114004612, - "metaTV": 22.7672245502, + "170105-153607": { + "TSr": 30.1504206657, + "TSw": 85.3579831123, + "actual": 715008.0, + "cores": 128, + "metaTIr": 5.26797223091, + "metaTIw": 9.44854044914, + "metaTVr": 3.83238220215, + "metaTVw": 0.0884137153625, "metadata": true, - "nodes": 4, + "nodes": 8, "once": false, - "openi": 10.4009640217, - "openo": 0.888756275177, - "real": 2735.0, - "request": 1074995.44647, + "openi": 5.4131872654, + "openo": 0.547555923462, + "real": 147.479970932, + "request": 561093.903809, "sys": "yellowstone" - }, - "140902-171344": { - "TS": 2968.51429629, - "actual": 2544960.0, + } + }, + 
"pyreshaper4c-v0": { + "151005-141659": { + "TSr": 153.592193604, + "TSw": 377.051423788, + "actual": 715008.0, "cores": 16, - "metaTI": 8.98694396019, - "metaTV": 33.0424771309, + "metaTIr": 23.6473412514, + "metaTIw": 70.3968391418, + "metaTVr": 0.163871049881, + "metaTVw": 0.453300952911, "metadata": true, "nodes": 4, "once": false, - "openi": 10.9293580055, - "openo": 0.932354211807, - "real": 3033.0, - "request": 1074995.44647, + "openi": 45.6162559986, + "openo": 0.26899600029, + "real": 666.109079123, + "request": 561093.903809, "sys": "yellowstone" - }, - "140902-215120": { - "TS": 2962.92944121, - "actual": 2544960.0, + } + } + }, + "startYear": "0001" + }, + "CICE-1.0": { + "baseline": "/glade/p/tdd/asap/bakeoff/tseries/cice-1.0", + "endYear": "0010", + "input": "/glade/p/tdd/asap/bakeoff/hist/cice-1.0", + "isize": 6596.608, + "n2dVars": 117, + "nVars": 117, + "osize": 3923.2, + "results": { + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 1717.0, + "sys": "geyser" + } + }, + "pyreshaper4c": { + "170103-122035": { + "TSr": 40.3931617737, + "TSw": 7.11516976357, + "actual": 177840.0, "cores": 16, - "metaTI": 8.98501515388, - "metaTV": 29.6636142731, + "metaTIr": 0.471884727478, + "metaTIw": 0.163590192795, + "metaTVr": 27.6176588535, + "metaTVw": 0.573172569275, "metadata": true, "nodes": 1, "once": false, - "openi": 9.88092207909, - "openo": 1.34773635864, - "real": 3029.0, - "request": 1074995.44647, + "openi": 46.3675200939, + "openo": 0.495728731155, + "real": 128.356168985, + "request": 8336.41067505, "sys": "yellowstone" }, - "140903-072528": { - "TS": 1525.11489868, - "actual": 2544960.0, - "cores": 32, - "metaTI": 4.27042007446, - "metaTV": 14.0503649712, + "170104-100543": { + "TSr": 37.2290420532, + "TSw": 7.04892301559, + "actual": 177840.0, + "cores": 16, + "metaTIr": 0.391742229462, + "metaTIw": 0.134761571884, + "metaTVr": 28.5068099499, + "metaTVw": 0.558147907257, "metadata": 
true, - "nodes": 2, + "nodes": 4, "once": false, - "openi": 9.37944698334, - "openo": 1.15070509911, - "real": 1563.0, - "request": 1074995.44647, + "openi": 32.5075061321, + "openo": 0.560415506363, + "real": 118.988889933, + "request": 8336.41067505, "sys": "yellowstone" }, - "140903-095922": { - "TS": 561.568521738, - "actual": 2544960.0, + "170105-153403": { + "TSr": 6.59751224518, + "TSw": 1.52600550652, + "actual": 177840.0, "cores": 128, - "metaTI": 1.82556605339, - "metaTV": 3.6412024498, + "metaTIr": 0.0741062164307, + "metaTIw": 0.111311912537, + "metaTVr": 3.03483390808, + "metaTVw": 0.0744416713715, "metadata": true, "nodes": 8, "once": false, - "openi": 8.11461997032, - "openo": 1.0275592804, - "real": 619.0, - "request": 1074995.44647, - "sys": "yellowstone" - }, - "140903-100240": { - "TS": 994.121627808, - "actual": 2544960.0, - "cores": 64, - "metaTI": 2.71437478065, - "metaTV": 7.76852416992, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.9681520462, - "openo": 1.23905086517, - "real": 1057.0, - "request": 1074995.44647, + "openi": 6.77716827393, + "openo": 0.797446012497, + "real": 24.2547209263, + "request": 8336.41067505, "sys": "yellowstone" - }, - "140903-100329": { - "TS": 535.087977648, - "actual": 2544960.0, - "cores": 192, - "metaTI": 1.12524223328, - "metaTV": 3.41003489494, - "metadata": true, - "nodes": 12, - "once": false, - "openi": 7.29357194901, - "openo": 0.894973993301, - "real": 589.0, - "request": 1074995.44647, - "sys": "yellowstone" - }, - "140903-100330": { - "TS": 534.806496859, - "actual": 2544960.0, - "cores": 160, - "metaTI": 1.2835021019, - "metaTV": 3.78852272034, - "metadata": true, - "nodes": 10, - "once": false, - "openi": 7.28589892387, - "openo": 1.07611322403, - "real": 590.0, - "request": 1074995.44647, - "sys": "yellowstone" - }, - "140903-100424": { - "TS": 819.987736464, - "actual": 2544960.0, - "cores": 96, - "metaTI": 2.11248207092, - "metaTV": 6.56551790237, - "metadata": true, - "nodes": 
6, - "once": false, - "openi": 8.11874198914, - "openo": 0.758881092072, - "real": 876.0, - "request": 1074995.44647, - "sys": "yellowstone" - }, - "140903-105324": { - "TS": 517.756282568, - "actual": 2544960.0, - "cores": 224, - "metaTI": 1.02261710167, - "metaTV": 2.5079562664, - "metadata": true, - "nodes": 14, - "once": false, - "openi": 8.84300518036, - "openo": 0.71942615509, - "real": 572.0, - "request": 1074995.44647, - "sys": "yellowstone" - }, - "140911-122056": { - "TS": 2685.49674749, - "actual": 2544960.0, - "cores": 16, - "metaTI": 8.03190565109, - "metaTV": 26.1082623005, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.18156194687, - "openo": 0.686249017715, - "real": 2771.0, - "request": 1074995.44647, - "sys": "yellowstone" - }, - "150123-111155": { - "TS": 2621.06582308, - "actual": 2544960.0, - "cores": 16, - "metaTI": 7.96389293671, - "metaTV": 38.0018165112, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.776350975, - "openo": 0.612726926804, - "real": 2710.0, - "request": 1074995.44647, - "sys": "yellowstone" - }, - "150918-144422": { - "TS": 2765.84281325, - "actual": 2544960.0, - "cores": 16, - "metaTI": 8.49992275238, - "metaTV": 47.4285974503, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.22591900826, - "openo": 0.864688158035, - "real": 2787.62379503, - "request": 1074995.44647, - "sys": "yellowstone" - }, - "151002-100638": { - "TS": 1835.60106206, - "actual": 2561592.0, - "cores": 16, - "metaTI": 6.68212819099, - "metaTV": 10.6226089001, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.9332418442, - "openo": 0.649513483047, - "real": 2207.67511892, - "request": 1078519.71414, - "sys": "yellowstone" - }, - "151002-123613": { - "TSr": 520.182231903, - "TSw": 1827.02842641, - "actual": 2561592.0, - "cores": 16, - "metaTIr": 2.53915166855, - "metaTIw": 6.77962827682, - "metaTVr": 40.9809556007, - "metaTVw": 12.277674675, - "metadata": true, - "nodes": 4, - "once": false, - 
"openi": 9.0326859951, - "openo": 0.439650297165, - "real": 2362.43693519, - "request": 1078519.71414, - "sys": "yellowstone" - }, - "151002-152949": { - "TSr": 1009.50953722, - "TSw": 1854.70439887, - "actual": 2561592.0, - "cores": 16, - "metaTIr": 1.82785892487, - "metaTIw": 8.34366703033, - "metaTVr": 54.4348504543, - "metaTVw": 17.7780208588, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 13.8072900772, - "openo": 0.825301170349, - "real": 2840.41509485, - "request": 1078519.71414, - "sys": "yellowstone" - }, - "151005-144227": { - "TSr": 273.461855412, - "TSw": 1828.38464713, - "actual": 2561592.0, - "cores": 16, - "metaTIr": 0.79497218132, - "metaTIw": 6.30753397942, - "metaTVr": 1.44262719154, - "metaTVw": 10.0376775265, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 86.030144453, - "openo": 0.728790521622, - "real": 2194.46634698, - "request": 1078519.71414, - "sys": "yellowstone" - } - } - }, - "startYear": "0001" - }, - "CAMSE-1.0": { - "baseline": "/glade/u/tdd/asap/bakeoff/tseries/camse-1.0", - "endYear": "0010", - "input": "/glade/u/tdd/asap/bakeoff/hist/camse-1.0", - "isize": 30681.088, - "n2dVars": 89, - "n3dVars": 43, - "nVars": 132, - "osize": 30848.5, - "results": { - "cdo": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 2045.0, - "sys": "caldera" - } - }, - "ncl": { - "140226-105740": { - "cores": 1, - "correct": "fail", - "kernel": 106.11, - "metadata": false, - "nodes": 1, - "real": 1744.31, - "sys": "geyser", - "user": 156.66 - }, - "140310-150352": { - "cores": 1, - "correct": false, - "kernel": 64.39, - "metadata": true, - "nodes": 1, - "real": 1415.25, - "sys": "geyser", - "user": 139.09 - } - }, - "nco": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 1675.0, - "sys": "geyser" - } - }, - "ncr": { - "140226-174338": { - "cores": 4, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 3508.0, 
- "sys": "caldera" - }, - "140312-174430": { - "TS": 584.01, - "cores": 16, - "correct": "pass", - "metaTI": 1.65, - "metaTV": 144.09, - "metadata": true, - "nodes": 4, - "real": 776.0, - "sys": "caldera" - } - }, - "pagoda": { - "??????-??????": { - "cores": 4, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 1221.0, - "sys": "caldera" - } - }, - "pynio": { - "140306-122536": { - "cores": 1, - "correct": "fail", - "kernel": 47.7, - "metadata": false, - "nodes": 1, - "real": 1186.08, - "sys": "geyser", - "user": 142.21 - }, - "140313-140230": { - "TS": 2018.78, - "actual": 1034400.0, - "cores": 1, - "correct": "fail", - "kernel": 64.35, - "metaTI": 15.56, - "metaTV": 205.56, - "metadata": true, - "nodes": 1, - "openi": 10.53, - "openo": 0.8, - "real": 2252.34, - "request": 30680.31, - "sys": "geyser", - "user": 207.35 - }, - "140314-095342": { - "TS": 1823.92, - "actual": 1034400.0, - "cores": 1, - "correct": "fail", - "kernel": 64.35, - "metaTI": 15.81, - "metaTV": 187.139, - "metadata": true, - "nodes": 1, - "openi": 9.58, - "openo": 1.64, - "real": 2046.33, - "request": 30680.31, - "sys": "geyser", - "user": 209.87 - } - }, - "pynio4_0": { - "140317-211646": { - "TS": 2030.66, - "actual": 1034400.0, - "cores": 1, - "correct": "fail", - "kernel": 71.43, - "metaTI": 5.59, - "metaTV": 154.64, - "metadata": true, - "nodes": 1, - "openi": 7.5, - "openo": 0.72, - "real": 2207.53, - "request": 30680.31, - "sys": "geyser", - "user": 554.99 - } - }, - "pynio4_1": { - "140317-220052": { - "TS": 2487.96, - "actual": 1034400.0, - "cores": 1, - "correct": "fail", - "kernel": 65.25, - "metaTI": 5.93, - "metaTV": 143.5, - "metadata": true, - "nodes": 1, - "openi": 8.76, - "openo": 0.31, - "real": 2655.9, - "request": 30680.31, - "sys": "geyser", - "user": 1237.65 - } - }, - "pyniompi": { - "140305-202628": { - "cores": 16, - "correct": "fail", - "kernel": 0.26, - "metadata": false, - "nodes": 4, - "real": 241.44, - "sys": "yellowstone", - "user": 0.22 - }, - 
"140307-12401": { - "cores": 16, - "correct": "fail", - "kernel": 0.1, - "metadata": true, - "nodes": 4, - "real": 394.67, - "sys": "yellowstone", - "user": 0.19 - }, - "140313-131051": { - "TS": 326.62, - "actual": 84000.0, - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metaTI": 0.07, - "metaTV": 7.41, - "metadata": true, - "nodes": 4, - "openi": 7.87, - "openo": 0.08, - "real": 351.16, - "request": 30680.31, - "sys": "yellowstone", - "user": 0.18 - }, - "140313-152431": { - "TS": 274.12, - "actual": 84000.0, - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metaTI": 0.85, - "metaTV": 7.67, - "metadata": true, - "nodes": 4, - "openi": 7.87, - "openo": 0.12, - "real": 298.27, - "request": 30680.31, - "sys": "yellowstone", - "user": 0.18 - }, - "140314-095707": { - "TS": 274.95, - "actual": 84000.0, - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metaTI": 0.66, - "metaTV": 3.82, - "metadata": true, - "nodes": 4, - "openi": 6.57, - "openo": 0.09, - "real": 295.59, - "request": 30680.31, - "sys": "yellowstone", - "user": 0.18 - } - }, - "pyniompi4_0": { - "140317-220307": { - "TS": 296.53, - "actual": 1034400.0, - "cores": 16, - "correct": "fail", - "kernel": 0.09, - "metaTI": 0.33, - "metaTV": 8.92, - "metadata": true, - "nodes": 4, - "openi": 6.24, - "openo": 0.14, - "real": 320.67, - "request": 30680.31, - "sys": "yellowstone", - "user": 0.19 - } - }, - "pyniompi4_1": { - "140317-225919": { - "TS": 360.21, - "actual": 1034400.0, - "cores": 16, - "correct": "fail", - "kernel": 0.09, - "metaTI": 0.39, - "metaTV": 15.98, - "metadata": true, - "nodes": 4, - "openi": 6.24, - "openo": 0.14, - "real": 385.35, - "request": 30680.31, - "sys": "yellowstone", - "user": 0.19 - } - }, - "pyreshaper": { - "140624-113737": { - "TS": 461.879812002, - "actual": 1034400.0, - "cores": 16, - "metaTI": 2.49611783028, - "metaTV": 31.9543848038, - "metadata": true, - "nodes": 4, - "openi": 7.89434218407, - "openo": 0.142995357513, - "real": 546.0, - "request": 
30681.9406128, - "sys": "yellowstone" - }, - "140826-145447": { - "TS": 241.934791088, - "actual": 1034400.0, - "cores": 16, - "metaTI": 1.35345077515, - "metaTV": 16.4527909756, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.71761202812, - "openo": 0.303935050964, - "real": 300.0, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "140902-142338": { - "TS": 248.70229125, - "actual": 1034400.0, - "cores": 16, - "metaTI": 1.40357112885, - "metaTV": 28.3160479069, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.03130292892, - "openo": 0.431185722351, - "real": 312.0, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "140911-112745": { - "TS": 179.752914667, - "actual": 1034400.0, - "cores": 16, - "metaTI": 1.11748170853, - "metaTV": 11.5109965801, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.6885778904, - "openo": 0.423074483871, - "real": 237.0, - "request": 30681.9406128, - "sys": "yellowstone" - } - }, - "pyreshaper4": { - "140624-113928": { - "TS": 543.640252829, - "actual": 1034400.0, - "cores": 16, - "metaTI": 2.19070911407, - "metaTV": 35.477850914, - "metadata": true, - "nodes": 4, - "openi": 11.187032938, - "openo": 0.304791927338, - "real": 599.0, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "140826-145447": { - "TS": 242.617993832, - "actual": 1034400.0, - "cores": 16, - "metaTI": 0.595024824142, - "metaTV": 15.4887797832, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.71837592125, - "openo": 0.375550270081, - "real": 300.0, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "140902-134722": { - "TS": 191.421459198, - "actual": 1034400.0, - "cores": 16, - "metaTI": 0.926884889603, - "metaTV": 14.8415930271, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.79110503197, - "openo": 0.299824237823, - "real": 249.0, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "140911-114148": { - "TS": 389.183979034, - "actual": 1034400.0, - 
"cores": 16, - "metaTI": 1.48791742325, - "metaTV": 30.8276884556, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 14.9462389946, - "openo": 0.562728881836, - "real": 457.0, - "request": 30681.9406128, - "sys": "yellowstone" - } - }, - "pyreshaper4c": { - "140624-114030": { - "TS": 529.075484514, - "actual": 1034400.0, - "cores": 16, - "metaTI": 2.13327646255, - "metaTV": 26.9639163017, - "metadata": true, - "nodes": 4, - "openi": 15.6393549442, - "openo": 0.988724708557, - "real": 613.0, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "140826-145447": { - "TS": 243.839389324, - "actual": 1034400.0, - "cores": 16, - "metaTI": 0.800844907761, - "metaTV": 12.4930071831, - "metadata": true, - "nodes": 4, - "openi": 8.7155380249, - "openo": 0.751301765442, - "real": 300.0, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "140902-110246": { - "TS": 280.340123892, - "actual": 91200.0, - "cores": 16, - "metaTI": 0.464305877686, - "metaTV": 4.72494339943, - "metadata": true, - "nodes": 4, - "once": true, - "openi": 8.61912894249, - "openo": 0.507271051407, - "real": 337.0, - "request": 30680.3215027, - "sys": "yellowstone" - }, - "140902-133355": { - "TS": 252.199251652, - "actual": 1034400.0, - "cores": 16, - "metaTI": 0.640518188477, - "metaTV": 11.9670262337, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.66032004356, - "openo": 0.285562992096, - "real": 309.0, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "140902-210623": { - "TS": 287.249596357, - "actual": 1034400.0, - "cores": 16, - "metaTI": 0.681195020676, - "metaTV": 31.8642385006, - "metadata": true, - "nodes": 1, - "once": false, - "openi": 19.1077411175, - "openo": 1.23596668243, - "real": 332.0, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "140911-114234": { - "TS": 396.764893293, - "actual": 1034400.0, - "cores": 16, - "metaTI": 1.6780602932, - "metaTV": 23.1965000629, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 
15.0613510609, - "openo": 0.554198741913, - "real": 469.0, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "150918-140346": { - "TS": 345.118621349, - "actual": 1034400.0, - "cores": 16, - "metaTI": 1.34499812126, - "metaTV": 47.3280603886, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 3.53514695168, - "openo": 0.332359075546, - "real": 377.197709084, - "request": 30681.9406128, - "sys": "yellowstone" - }, - "151002-091617": { - "TS": 65.0861947536, - "actual": 1043904.0, - "cores": 16, - "metaTI": 0.534062385559, - "metaTV": 15.8530282974, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 7.89265394211, - "openo": 0.417589902878, - "real": 198.518553019, - "request": 30828.9682617, - "sys": "yellowstone" - }, - "151002-120250": { - "TSr": 292.098123789, - "TSw": 64.9243013859, - "actual": 1043904.0, - "cores": 16, - "metaTIr": 0.472710609436, - "metaTIw": 0.53111410141, - "metaTVr": 14.7551693916, - "metaTVw": 16.2517373562, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 6.55523085594, - "openo": 0.544054508209, - "real": 370.380084991, - "request": 30828.9682617, - "sys": "yellowstone" - }, - "151002-144824": { - "TSr": 300.078349829, - "TSw": 107.732641459, - "actual": 1043904.0, - "cores": 16, - "metaTIr": 0.865505695343, - "metaTIw": 0.587111234665, - "metaTVr": 12.9958343506, - "metaTVw": 31.3837080002, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 2.28778886795, - "openo": 1.09594678879, - "real": 389.049459934, - "request": 30828.9682617, - "sys": "yellowstone" - }, - "151005-133419": { - "TSr": 142.834536552, - "TSw": 65.3223130703, - "actual": 1043904.0, - "cores": 16, - "metaTIr": 0.0972127914429, - "metaTIw": 0.550078868866, - "metaTVr": 1.72045087814, - "metaTVw": 15.8371667862, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 36.3499717712, - "openo": 0.30549287796, - "real": 248.811717987, - "request": 30828.9682617, - "sys": "yellowstone" - }, - "151005-141258": { - 
"TSr": 302.199320793, - "TSw": 65.4644916058, - "actual": 1043904.0, - "cores": 16, - "metaTIr": 0.102242946625, - "metaTIw": 0.530309677124, - "metaTVr": 2.07044291496, - "metaTVw": 16.0213608742, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 63.9010326862, - "openo": 0.194200992584, - "real": 430.064186811, - "request": 30828.9682617, - "sys": "yellowstone" - } - } - }, - "startYear": "0001" - }, - "CICE-0.1": { - "baseline": "/glade/u/tdd/asap/bakeoff/tseries/cice-0.1", - "endYear": "0010", - "input": "/glade/u/tdd/asap/bakeoff/hist/cice-0.1", - "isize": 444023.808, - "n2dVars": 112, - "nVars": 112, - "osize": 42112.4, - "results": { - "cdo": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 52644.0, - "sys": "geyser" - } - }, - "ncl": { - "140303-153053": { - "cores": 1, - "correct": "fail", - "kernel": 2041.37, - "metadata": true, - "nodes": 1, - "real": 42931.93, - "sys": "geyser", - "user": 2850.04 - } - }, - "nco": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 8909.0, - "sys": "geyser" - } - }, - "ncr": { - "140226-172014": { - "cores": 4, - "correct": "fail", - "metadata": true, - "nodes": 1, - "real": 1894.0, - "sys": "caldera" - }, - "140312-191124": { - "TS": 1496.09, - "cores": 16, - "correct": "fail", - "metaTI": 340.73, - "metaTV": 382.07, - "metadata": true, - "nodes": 4, - "real": 2275.0, - "sys": "caldera" - } - }, - "pagoda": { - "??????-??????": { - "cores": 16, - "correct": "pass", - "metadata": true, - "nodes": 4, - "real": 7643.0, - "sys": "caldera" - } - }, - "pynio": { - "140306-095228": { - "cores": 1, - "correct": "fail", - "kernel": 615.82, - "metadata": false, - "nodes": 1, - "real": 5123.69, - "sys": "geyser", - "user": 2224.77 - }, - "140313-191710": { - "TS": 8314.32, - "actual": 591360.0, - "cores": 1, - "correct": "fail", - "kernel": 1142.96, - "metaTI": 863.84, - "metaTV": 250.88, - "metadata": true, - 
"nodes": 1, - "openi": 10.87, - "openo": 0.43, - "real": 9441.99, - "request": 442968.9, - "sys": "yellowstone", - "user": 3773.87 - } - }, - "pynio4_0": { - "140317-232134": { - "TS": 10980.29, - "actual": 591360.0, - "cores": 1, - "correct": "fail", - "kernel": 1239.65, - "metaTI": 2122.46, - "metaTV": 252.9, - "metadata": true, - "nodes": 1, - "openi": 11.49, - "openo": 0.15, - "real": 13369.16, - "request": 442968.9, - "sys": "yellowstone", - "user": 8880.6 - } - }, - "pyniompi": { - "140305-214140": { - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metadata": false, - "nodes": 4, - "real": 499.0, - "sys": "yellowstone", - "user": 0.19 - }, - "140313-160555": { - "TS": 506.93, - "actual": 483840.0, - "cores": 16, - "correct": "fail", - "kernel": 0.33, - "metaTI": 49.12, - "metaTV": 7.06, - "metadata": true, - "nodes": 4, - "openi": 11.02, - "openo": 0.33, - "real": 506.94, - "request": 442968.75, - "sys": "yellowstone", - "user": 0.22 - } - }, - "pyniompi4_0": { - "140318-070023": { - "TS": 625.78, - "actual": 591360.0, - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metaTI": 82.63, - "metaTV": 4.44, - "metadata": true, - "nodes": 4, - "openi": 10.87, - "openo": 0.21, - "real": 723.09, - "request": 442968.75, - "sys": "yellowstone", - "user": 0.22 - } - }, - "pyniompi4_1": { - "140318-092050": { - "TS": 810.31, - "actual": 591360.0, - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metaTI": 109.63, - "metaTV": 12.48, - "metadata": true, - "nodes": 4, - "openi": 11.27, - "openo": 0.21, - "real": 945.86, - "request": 442968.75, - "sys": "yellowstone", - "user": 0.22 - } - }, - "pyreshaper": { - "140624-114000": { - "TS": 526.702405453, - "actual": 591360.0, - "cores": 16, - "metaTI": 68.0012328625, - "metaTV": 12.8914823532, - "metadata": true, - "nodes": 4, - "openi": 9.54658794403, - "openo": 0.496648788452, - "real": 631.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "140826-183939": { - "TS": 516.087095737, - "actual": 
591360.0, - "cores": 16, - "metaTI": 68.3514671326, - "metaTV": 4.9983458519, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.4577100277, - "openo": 0.327159166336, - "real": 638.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "140902-142858": { - "TS": 526.552634001, - "actual": 591360.0, - "cores": 16, - "metaTI": 52.2834627628, - "metaTV": 7.26059746742, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.8995859623, - "openo": 0.3378469944, - "real": 632.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "140911-113541": { - "TS": 510.946885824, - "actual": 591360.0, - "cores": 16, - "metaTI": 48.8930177689, - "metaTV": 7.55659413338, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.9417448044, - "openo": 0.309250116348, - "real": 615.0, - "request": 442968.903809, - "sys": "yellowstone" - } - }, - "pyreshaper4": { - "140624-114226": { - "TS": 627.303998947, - "actual": 591360.0, - "cores": 16, - "metaTI": 92.6080269814, - "metaTV": 8.79434752464, - "metadata": true, - "nodes": 4, - "openi": 9.54019212723, - "openo": 0.270900964737, - "real": 777.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "140827-150143": { - "TS": 622.689025164, - "actual": 591360.0, - "cores": 16, - "metaTI": 85.026517868, - "metaTV": 5.28267073631, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.65681600571, - "openo": 0.476130962372, - "real": 761.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "140902-135549": { - "TS": 611.696000099, - "actual": 591360.0, - "cores": 16, - "metaTI": 91.8807518482, - "metaTV": 8.92097759247, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.3194150925, - "openo": 0.477138996124, - "real": 756.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "140911-114735": { - "TS": 663.592682123, - "actual": 591360.0, - "cores": 16, - "metaTI": 115.95199728, - "metaTV": 12.9579110146, - "metadata": true, - "nodes": 4, - "once": 
false, - "openi": 8.26338791847, - "openo": 0.33904671669, - "real": 804.0, - "request": 442968.903809, - "sys": "yellowstone" - } - }, - "pyreshaper4c": { - "140624-114838": { - "TS": 764.767405272, - "actual": 591360.0, - "cores": 16, - "metaTI": 111.515741587, - "metaTV": 10.9106748104, - "metadata": true, - "nodes": 4, - "openi": 8.38807797432, - "openo": 0.644446849823, - "real": 946.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "140827-150440": { - "TS": 761.094282866, - "actual": 591360.0, - "cores": 16, - "metaTI": 114.62279439, - "metaTV": 9.76032447815, - "metadata": true, - "nodes": 4, - "openi": 9.65682888031, - "openo": 0.308700799942, - "real": 938.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "140902-111112": { - "TS": 776.177241087, - "actual": 484800.0, - "cores": 16, - "metaTI": 13.9352908134, - "metaTV": 3.02018046379, - "metadata": true, - "nodes": 4, - "once": true, - "openi": 10.2728459835, - "openo": 0.466406345367, - "real": 843.0, - "request": 442968.751373, - "sys": "yellowstone" - }, - "140902-134411": { - "TS": 780.895755291, - "actual": 591360.0, - "cores": 16, - "metaTI": 114.899274826, - "metaTV": 10.0706048012, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 11.0469231606, - "openo": 0.573573112488, - "real": 925.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "140902-211735": { - "TS": 827.541911125, - "actual": 591360.0, - "cores": 16, - "metaTI": 135.3350811, - "metaTV": 12.0612881184, - "metadata": true, - "nodes": 1, - "once": false, - "openi": 11.1946439743, - "openo": 1.40033817291, - "real": 1004.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "140911-115045": { - "TS": 823.123622179, - "actual": 591360.0, - "cores": 16, - "metaTI": 113.007497072, - "metaTV": 10.0991332531, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.7019701004, - "openo": 0.274614095688, - "real": 960.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - 
"150115-172148": { - "TS": 481.313974142, - "actual": 591360.0, - "cores": 32, - "metaTI": 73.4051861763, - "metaTV": 5.76744818687, - "metadata": true, - "nodes": 2, - "once": false, - "openi": 10.6601059437, - "openo": 0.632927179337, - "real": 583.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "150115-174118": { - "TS": 256.810166836, - "actual": 591360.0, - "cores": 64, - "metaTI": 34.1589169502, - "metaTV": 2.17316675186, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.4423499107, - "openo": 0.746605396271, - "real": 337.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "150115-180958": { - "TS": 250.097705364, - "actual": 591360.0, - "cores": 96, - "metaTI": 34.0083539486, - "metaTV": 2.6528646946, - "metadata": true, - "nodes": 6, - "once": false, - "openi": 8.67340183258, - "openo": 0.761589050293, - "real": 338.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "150115-201821": { - "TS": 148.282890081, - "actual": 591360.0, - "cores": 128, - "metaTI": 12.9395039082, - "metaTV": 0.515069484711, - "metadata": true, - "nodes": 8, - "once": false, - "openi": 8.87768483162, - "openo": 0.856188058853, - "real": 212.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "150115-201850": { - "TS": 148.89666605, - "actual": 591360.0, - "cores": 160, - "metaTI": 13.6156411171, - "metaTV": 0.580461025238, - "metadata": true, - "nodes": 10, - "once": false, - "openi": 9.20113587379, - "openo": 0.783620834351, - "real": 213.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "150115-203102": { - "TS": 817.954832315, - "actual": 591360.0, - "cores": 16, - "metaTI": 130.260757685, - "metaTV": 11.7173800468, - "metadata": true, - "nodes": 1, - "once": false, - "openi": 8.85170078278, - "openo": 1.00009465218, - "real": 973.0, - "request": 442968.903809, - "sys": "yellowstone" - }, - "150918-141419": { - "TS": 861.581918955, - "actual": 591360.0, - "cores": 16, - "metaTI": 132.98685503, - "metaTV": 
11.4721367359, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.71458387375, - "openo": 0.56676697731, - "real": 1004.08923078, - "request": 442968.903809, - "sys": "yellowstone" - }, - "151002-092925": { - "TS": 376.300118208, - "actual": 715008.0, - "cores": 16, - "metaTI": 70.3662366867, - "metaTV": 0.59819149971, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 11.5092349052, - "openo": 0.235598564148, - "real": 785.061555147, - "request": 561093.903809, - "sys": "yellowstone" - }, - "151002-121023": { - "TSr": 317.063103914, - "TSw": 378.894732237, - "actual": 715008.0, - "cores": 16, - "metaTIr": 48.0411641598, - "metaTIw": 70.4985556602, - "metaTVr": 9.33440876007, - "metaTVw": 0.652191162109, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.04424095154, - "openo": 0.267147064209, - "real": 811.08354497, - "request": 561093.903809, - "sys": "yellowstone" - }, - "151002-150144": { - "TSr": 619.917356491, - "TSw": 379.453398466, - "actual": 715008.0, - "cores": 16, - "metaTIr": 102.077307701, - "metaTIw": 78.5123889446, - "metaTVr": 23.8168251514, - "metaTVw": 2.38540887833, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 21.7854847908, - "openo": 0.407775163651, - "real": 1168.82769799, - "request": 561093.903809, - "sys": "yellowstone" - }, - "151005-134132": { - "TSr": 160.806319952, - "TSw": 379.374226332, - "actual": 715008.0, - "cores": 16, - "metaTIr": 25.2732410431, - "metaTIw": 70.726893425, - "metaTVr": 0.279175758362, - "metaTVw": 0.473909139633, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 48.6959922314, - "openo": 0.246194124222, - "real": 676.466734886, - "request": 561093.903809, - "sys": "yellowstone" - }, - "151005-141659": { - "TSr": 153.592193604, - "TSw": 377.051423788, - "actual": 715008.0, - "cores": 16, - "metaTIr": 23.6473412514, - "metaTIw": 70.3968391418, - "metaTVr": 0.163871049881, - "metaTVw": 0.453300952911, - "metadata": true, - "nodes": 4, - "once": false, 
- "openi": 45.6162559986, - "openo": 0.26899600029, - "real": 666.109079123, - "request": 561093.903809, - "sys": "yellowstone" - } - } - }, - "startYear": "0001" - }, - "CICE-1.0": { - "baseline": "/glade/u/tdd/asap/bakeoff/tseries/cice-1.0", - "endYear": "0010", - "input": "/glade/u/tdd/asap/bakeoff/hist/cice-1.0", - "isize": 6596.608, - "n2dVars": 117, - "nVars": 117, - "osize": 3923.2, - "results": { - "cdo": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 1266.0, - "sys": "caldera" - } - }, - "ncl": { - "140226-090346": { - "cores": 1, - "correct": "fail", - "kernel": 30.49, - "metadata": false, - "nodes": 1, - "real": 122.42, - "sys": "geyser", - "user": 41.92 - } - }, - "nco": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 1717.0, - "sys": "geyser" - } - }, - "ncr": { - "140226-165327": { - "cores": 4, - "correct": "fail", - "metadata": true, - "nodes": 1, - "real": 351.0, - "sys": "caldera" - }, - "140312-192224": { - "TS": 590.53, - "cores": 16, - "correct": "fail", - "metaTI": 3.88, - "metaTV": 24.01, - "metadata": true, - "nodes": 4, - "real": 646.0, - "sys": "caldera" - } - }, - "pagoda": { - "??????-??????": { - "cores": 4, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 1472.0, - "sys": "caldera" - } - }, - "pynio": { - "140306-085654": { - "cores": 1, - "correct": "fail", - "kernel": 10.99, - "metadata": false, - "nodes": 1, - "real": 583.14, - "sys": "geyser", - "user": 37.36 - }, - "140313-140222": { - "TS": 749.0, - "actual": 56160.0, - "cores": 1, - "correct": "fail", - "kernel": 21.67, - "metaTI": 24.15, - "metaTV": 43.49, - "metadata": true, - "nodes": 1, - "openi": 11.29, - "openo": 0.06, - "real": 829.98, - "request": 6581.25, - "sys": "geyser", - "user": 73.95 - } - }, - "pynio4_0": { - "140317-232127": { - "TS": 738.4, - "actual": 168480.0, - "cores": 1, - "correct": "fail", - "kernel": 19.79, - "metaTI": 36.44, - 
"metaTV": 29.97, - "metadata": true, - "nodes": 1, - "openi": 8.05, - "openo": 0.14, - "real": 814.78, - "request": 6581.25, - "sys": "geyser", - "user": 150.37 - } - }, - "pynio4_1": { - "140317-114654": { - "TS": 955.55, - "actual": 168480.0, - "cores": 1, - "correct": "fail", - "kernel": 16.99, - "metaTI": 61.52, - "metaTV": 96.4, - "metadata": true, - "nodes": 1, - "openi": 10.16, - "openo": 0.65, - "real": 1125.94, - "request": 6581.25, - "sys": "geyser", - "user": 206.09 - } - }, - "pyniompi": { - "140306-085823": { - "cores": 16, - "correct": "fail", - "kernel": 0.1, - "metadata": false, - "nodes": 4, - "real": 60.68, - "sys": "yellowstone", - "user": 0.18 - }, - "140313-154954": { - "TS": 77.32, - "actual": 56160.0, - "cores": 16, - "correct": "fail", - "kernel": 0.07, - "metaTI": 1.95, - "metaTV": 3.78, - "metadata": true, - "nodes": 4, - "openi": 10.54, - "openo": 0.1, - "real": 98.22, - "request": 6581.25, - "sys": "yellowstone", - "user": 0.19 - } - }, - "pyniompi4_0": { - "140318-070023": { - "TS": 78.05, - "actual": 168480.0, - "cores": 16, - "correct": "fail", - "kernel": 0.07, - "metaTI": 2.96, - "metaTV": 2.97, - "metadata": true, - "nodes": 4, - "openi": 9.39, - "openo": 0.18, - "real": 99.4, - "request": 6581.25, - "sys": "yellowstone", - "user": 0.18 - } - }, - "pyniompi4_1": { - "140318-092051": { - "TS": 73.51, - "actual": 168480.0, - "cores": 16, - "correct": "fail", - "kernel": 0.07, - "metaTI": 2.28, - "metaTV": 2.36, - "metadata": true, - "nodes": 4, - "openi": 9.43, - "openo": 0.32, - "real": 94.29, - "request": 6581.25, - "sys": "yellowstone", - "user": 0.18 - } - }, - "pyreshaper": { - "140624-113558": { - "TS": 265.71512866, - "actual": 168480.0, - "cores": 16, - "metaTI": 5.01533293724, - "metaTV": 11.8876111507, - "metadata": true, - "nodes": 4, - "openi": 19.778968811, - "openo": 0.388194084167, - "real": 341.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "140826-145234": { - "TS": 113.901770353, - "actual": 168480.0, 
- "cores": 16, - "metaTI": 3.08545422554, - "metaTV": 5.78772473335, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.86034393311, - "openo": 0.28881072998, - "real": 167.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "140902-142114": { - "TS": 118.544679165, - "actual": 168480.0, - "cores": 16, - "metaTI": 3.13189768791, - "metaTV": 13.6027126312, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 7.65998196602, - "openo": 0.59086060524, - "real": 168.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "140911-112739": { - "TS": 73.3629565239, - "actual": 168480.0, - "cores": 16, - "metaTI": 1.93174624443, - "metaTV": 2.94866061211, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 13.4689209461, - "openo": 0.409775018692, - "real": 133.0, - "request": 6581.41067505, - "sys": "yellowstone" - } - }, - "pyreshaper4": { - "140624-113559": { - "TS": 266.083644629, - "actual": 168480.0, - "cores": 16, - "metaTI": 4.95094203949, - "metaTV": 10.9867026806, - "metadata": true, - "nodes": 4, - "openi": 19.8844599724, - "openo": 0.547864675522, - "real": 347.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "140826-145234": { - "TS": 113.744300365, - "actual": 168480.0, - "cores": 16, - "metaTI": 4.30774283409, - "metaTV": 7.11053514481, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.86026287079, - "openo": 0.19504904747, - "real": 167.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "140902-134448": { - "TS": 70.0048351288, - "actual": 168480.0, - "cores": 16, - "metaTI": 2.3086950779, - "metaTV": 3.15719771385, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.03446006775, - "openo": 0.369714021683, - "real": 88.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "140911-113815": { - "TS": 177.28745842, - "actual": 168480.0, - "cores": 16, - "metaTI": 5.62634396553, - "metaTV": 7.50685191154, - "metadata": true, - "nodes": 4, - "once": false, - 
"openi": 14.2388157845, - "openo": 0.467931509018, - "real": 244.0, - "request": 6581.41067505, - "sys": "yellowstone" - } - }, - "pyreshaper4c": { - "140624-113440": { - "TS": 240.76835227, - "actual": 168480.0, - "cores": 16, - "metaTI": 3.82919192314, - "metaTV": 5.04663062096, - "metadata": true, - "nodes": 4, - "openi": 18.8568351269, - "openo": 0.306517124176, - "real": 311.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "140826-145234": { - "TS": 113.022646427, - "actual": 168480.0, - "cores": 16, - "metaTI": 2.44539260864, - "metaTV": 3.75828146935, - "metadata": true, - "nodes": 4, - "openi": 9.8609559536, - "openo": 0.511830091476, - "real": 167.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "140902-105902": { - "TS": 91.887765646, - "actual": 57120.0, - "cores": 16, - "metaTI": 1.58539175987, - "metaTV": 4.56849193573, - "metadata": true, - "nodes": 4, - "once": true, - "openi": 8.57277202606, - "openo": 0.412314414978, - "real": 113.0, - "request": 6581.25137329, - "sys": "yellowstone" - }, - "140902-133113": { - "TS": 96.3482551575, - "actual": 168480.0, - "cores": 16, - "metaTI": 2.94213962555, - "metaTV": 3.6310069561, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.32849788666, - "openo": 0.319946289062, - "real": 147.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "140902-210319": { - "TS": 104.103149891, - "actual": 168480.0, - "cores": 16, - "metaTI": 6.43604660034, - "metaTV": 20.8584721088, - "metadata": true, - "nodes": 1, - "once": false, - "openi": 18.3028130531, - "openo": 0.899433851242, - "real": 148.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "140911-113908": { - "TS": 196.712829828, - "actual": 168480.0, - "cores": 16, - "metaTI": 5.53936100006, - "metaTV": 6.65181231499, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 17.4854698181, - "openo": 0.200965881348, - "real": 263.0, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "150918-140000": 
{ - "TS": 120.856712103, - "actual": 168480.0, - "cores": 16, - "metaTI": 4.0064227581, - "metaTV": 2.71381306648, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.30154585838, - "openo": 0.190922498703, - "real": 125.465778112, - "request": 6581.41067505, - "sys": "yellowstone" - }, - "151002-093114": { - "TS": 7.29186058044, - "actual": 177840.0, - "cores": 16, - "metaTI": 1.62267613411, - "metaTV": 0.319401979446, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.7917881012, - "openo": 0.256956577301, - "real": 85.7960050106, - "request": 8336.41067505, - "sys": "yellowstone" - }, - "151002-115913": { - "TSr": 131.842990875, - "TSw": 7.56044721603, - "actual": 177840.0, - "cores": 16, - "metaTIr": 1.72517442703, - "metaTIw": 1.63546466827, - "metaTVr": 6.24339962006, - "metaTVw": 0.316979408264, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.83018398285, - "openo": 0.647891044617, - "real": 141.086723089, - "request": 8336.41067505, - "sys": "yellowstone" - }, - "151002-144535": { - "TSr": 164.895143509, - "TSw": 8.39152407646, - "actual": 177840.0, - "cores": 16, - "metaTIr": 2.69702959061, - "metaTIw": 1.62958669662, - "metaTVr": 6.13357424736, - "metaTVw": 0.329475164413, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 12.0156641006, - "openo": 2.29768490791, - "real": 177.504949093, - "request": 8336.41067505, - "sys": "yellowstone" - }, - "151005-133333": { - "TSr": 73.7382771969, - "TSw": 7.17182159424, - "actual": 177840.0, - "cores": 16, - "metaTIr": 1.14449071884, - "metaTIw": 1.64758038521, - "metaTVr": 0.0861296653748, - "metaTVw": 0.355614185333, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 18.1041100025, - "openo": 0.222780942917, - "real": 116.379180908, - "request": 8336.41067505, - "sys": "yellowstone" - }, - "151005-141033": { - "TSr": 179.972988367, - "TSw": 7.15631198883, - "actual": 177840.0, - "cores": 16, - "metaTIr": 1.48803186417, - "metaTIw": 1.63100767136, - 
"metaTVr": 0.086678981781, - "metaTVw": 0.353240013123, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 19.2583482265, - "openo": 0.235752820969, - "real": 248.850775957, - "request": 8336.41067505, - "sys": "yellowstone" - } - } - }, - "startYear": "0001" - }, - "CLM-0.25": { - "baseline": "/glade/u/tdd/asap/bakeoff/tseries/clmse-0.25", - "endYear": "0010", - "input": "/glade/u/tdd/asap/bakeoff/hist/clmse-0.25", - "isize": 81802.24, - "n2dVars": 150, - "nVars": 150, - "osize": 124702.8, - "results": { - "ncl": { - "140228-145409": { - "cores": 1, - "kernel": 194.42, - "metadata": false, - "nodes": 1, - "real": 3358.85, - "sys": "geyser", - "user": 377.15 - } - }, - "nco": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 4141.0, - "sys": "geyser" - } - }, - "ncr": { - "140303-105425": { - "cores": 12, - "correct": "pass", - "metadata": true, - "real": 4301.0, - "sys": "caldera" - }, - "140312-193441": { - "TS": 385.05, - "cores": 16, - "correct": "pass", - "metaTI": 181.87, - "metaTV": 104.3, - "metadata": true, - "nodes": 4, - "real": 723.0, - "sys": "caldera" - } - }, - "pagoda": { - "??????-??????": { - "cores": 16, - "correct": "pass", - "metadata": true, - "nodes": 4, - "real": 5493.0, - "sys": "caldera" - } - }, - "pynio": { - "140306-091858": { - "cores": 1, - "correct": "fail", - "kernel": 86.93, - "metadata": false, - "nodes": 1, - "real": 1694.12, - "sys": "geyser", - "user": 415.54 - }, - "140313-191915": { - "TS": 2534.72, - "cores": 1, - "kernel": 203.25, - "metaTI": 418.15, - "metaTV": 323.28, - "metadata": true, - "nodes": 1, - "openi": 11.03, - "openo": 0.8, - "real": 3289.98, - "sys": "geyser", - "user": 838.8 - } - }, - "pyniompi": { - "140306-091653": { - "cores": 16, - "correct": "fail", - "kernel": 0.1, - "metadata": false, - "nodes": 4, - "real": 174.86, - "sys": "yellowstone", - "user": 0.17 - }, - "140313-160541": { - "TS": 222.28, - "cores": 16, - "kernel": 0.24, - "metaTI": 
26.88, - "metaTV": 9.63, - "metadata": true, - "nodes": 4, - "openi": 10.39, - "openo": 0.09, - "real": 266.2, - "sys": "yellowstone", - "user": 0.29 - } - }, - "pyniompi4_0": { - "140318-070023": { - "TS": 255.0, - "actual": 613440.0, - "cores": 16, - "correct": "fail", - "kernel": 0.1, - "metaTI": 37.85, - "metaTV": 10.41, - "metadata": true, - "nodes": 4, - "openi": 10.81, - "openo": 0.16, - "real": 309.31, - "request": 81515.14, - "sys": "yellowstone", - "user": 0.19 - } - }, - "pyniompi4_1": { - "140318-091904": { - "TS": 296.05, - "actual": 613440.0, - "cores": 16, - "correct": "fail", - "kernel": 0.1, - "metaTI": 46.37, - "metaTV": 12.85, - "metadata": true, - "nodes": 4, - "openi": 10.67, - "openo": 0.23, - "real": 354.78, - "request": 81515.14, - "sys": "yellowstone", - "user": 0.19 - } - }, - "pyreshaper": { - "140624-113329": { - "TS": 217.386395931, - "actual": 749760.0, - "cores": 16, - "metaTI": 27.5108315945, - "metaTV": 11.7934093475, - "metadata": true, - "nodes": 4, - "openi": 8.86293721199, - "openo": 0.513876199722, - "real": 271.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "140826-183413": { - "TS": 228.074203253, - "actual": 749760.0, - "cores": 16, - "metaTI": 27.8776259422, - "metaTV": 14.3903722763, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.19029212, - "openo": 0.420016527176, - "real": 312.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "140902-142333": { - "TS": 229.419958115, - "actual": 749760.0, - "cores": 16, - "metaTI": 28.2502429485, - "metaTV": 12.5835442543, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.6433389187, - "openo": 0.182495117188, - "real": 307.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "140911-113052": { - "TS": 229.645380735, - "actual": 749760.0, - "cores": 16, - "metaTI": 29.3559648991, - "metaTV": 15.3165843487, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.9768149853, - "openo": 0.276948690414, - "real": 
318.0, - "request": 81515.3311157, - "sys": "yellowstone" - } - }, - "pyreshaper4": { - "140624-113608": { - "TS": 253.195567369, - "actual": 749760.0, - "cores": 16, - "metaTI": 36.1291060448, - "metaTV": 14.0747671127, - "metadata": true, - "nodes": 4, - "openi": 7.50084495544, - "openo": 0.289590358734, - "real": 351.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "140827-145505": { - "TS": 262.098815203, - "actual": 749760.0, - "cores": 16, - "metaTI": 40.8452181816, - "metaTV": 14.2391810417, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.89155387878, - "openo": 0.295248270035, - "real": 363.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "140902-135021": { - "TS": 264.649072409, - "actual": 749760.0, - "cores": 16, - "metaTI": 38.9351382256, - "metaTV": 14.8586471081, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.3381459713, - "openo": 0.494373321533, - "real": 356.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "140911-114041": { - "TS": 277.828287601, - "actual": 749760.0, - "cores": 16, - "metaTI": 42.8577725887, - "metaTV": 15.5075862408, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.52782797813, - "openo": 0.650231122971, - "real": 390.0, - "request": 81515.3311157, - "sys": "yellowstone" - } - }, - "pyreshaper4c": { - "140624-113607": { - "TS": 293.581233501, - "actual": 749760.0, - "cores": 16, - "metaTI": 44.2273983955, - "metaTV": 11.2281382084, - "metadata": true, - "nodes": 4, - "openi": 7.57388806343, - "openo": 0.776012897491, - "real": 398.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "140827-145505": { - "TS": 259.073281527, - "actual": 749760.0, - "cores": 16, - "metaTI": 44.2974903584, - "metaTV": 7.55031824112, - "metadata": true, - "nodes": 4, - "openi": 8.77940702438, - "openo": 0.629299879074, - "real": 363.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "140902-110305": { - "TS": 301.58286643, - "actual": 106080.0, - 
"cores": 16, - "metaTI": 15.0113711357, - "metaTV": 2.47730088234, - "metadata": true, - "nodes": 4, - "once": true, - "openi": 10.8128950596, - "openo": 0.278973579407, - "real": 356.0, - "request": 81514.3762207, - "sys": "yellowstone" - }, - "140902-133446": { - "TS": 296.484625578, - "actual": 749760.0, - "cores": 16, - "metaTI": 46.2099742889, - "metaTV": 13.8055045605, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.4409430027, - "openo": 0.632032871246, - "real": 360.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "140902-210745": { - "TS": 310.555556536, - "actual": 749760.0, - "cores": 16, - "metaTI": 54.604319334, - "metaTV": 18.0128221512, - "metadata": true, - "nodes": 1, - "once": false, - "openi": 10.94216609, - "openo": 0.77930521965, - "real": 414.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "140911-114209": { - "TS": 329.421911955, - "actual": 749760.0, - "cores": 16, - "metaTI": 49.9806575775, - "metaTV": 15.6132264137, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.77800393105, - "openo": 0.532899856567, - "real": 444.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "150115-171626": { - "TS": 208.854488611, - "actual": 749760.0, - "cores": 32, - "metaTI": 27.8093998432, - "metaTV": 9.49487662315, - "metadata": true, - "nodes": 2, - "once": false, - "openi": 10.8636159897, - "openo": 0.587514877319, - "real": 261.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "150115-174419": { - "TS": 166.342139482, - "actual": 749760.0, - "cores": 64, - "metaTI": 14.7328977585, - "metaTV": 4.93773913383, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.89057898521, - "openo": 0.849577188492, - "real": 235.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "150115-181055": { - "TS": 155.178570032, - "actual": 749760.0, - "cores": 96, - "metaTI": 10.2803800106, - "metaTV": 2.88407659531, - "metadata": true, - "nodes": 6, - "once": false, - "openi": 
9.95401000977, - "openo": 0.725273132324, - "real": 229.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "150115-201849": { - "TS": 140.330249071, - "actual": 749760.0, - "cores": 160, - "metaTI": 4.11911892891, - "metaTV": 8.6886074543, - "metadata": true, - "nodes": 10, - "once": false, - "openi": 8.32369208336, - "openo": 0.702972888947, - "real": 212.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "150115-202111": { - "TS": 299.16028142, - "actual": 749760.0, - "cores": 16, - "metaTI": 50.0943911076, - "metaTV": 13.3113751411, - "metadata": true, - "nodes": 1, - "once": false, - "openi": 9.67527198792, - "openo": 0.882090806961, - "real": 382.0, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "150918-140302": { - "TS": 225.530354023, - "actual": 749760.0, - "cores": 16, - "metaTI": 71.1400485039, - "metaTV": 30.6447684765, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.88314318657, - "openo": 0.319357633591, - "real": 307.761037111, - "request": 81515.3311157, - "sys": "yellowstone" - }, - "151002-093416": { - "TS": 64.9041206837, - "actual": 798360.0, - "cores": 16, - "metaTI": 42.5193638802, - "metaTV": 3.18136024475, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.3529610634, - "openo": 0.151391983032, - "real": 261.155253887, - "request": 124675.246468, - "sys": "yellowstone" - }, - "151002-120117": { - "TSr": 121.410551786, - "TSw": 64.8986198902, - "actual": 798360.0, - "cores": 16, - "metaTIr": 24.5644390583, - "metaTIw": 42.7191648483, - "metaTVr": 19.7610211372, - "metaTVw": 3.1215763092, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 11.1755421162, - "openo": 0.393360853195, - "real": 262.587915897, - "request": 124675.246468, - "sys": "yellowstone" - }, - "151002-145027": { - "TSr": 257.785825014, - "TSw": 95.3491647243, - "actual": 798360.0, - "cores": 16, - "metaTIr": 49.3381197453, - "metaTIw": 55.4652349949, - "metaTVr": 50.4830415249, - "metaTVw": 4.86789798737, 
- "metadata": true, - "nodes": 4, - "once": false, - "openi": 21.76009202, - "openo": 0.745949745178, - "real": 462.231551886, - "request": 124675.246468, - "sys": "yellowstone" - }, - "151005-133706": { - "TSr": 127.764489651, - "TSw": 65.0523197651, - "actual": 798360.0, - "cores": 16, - "metaTIr": 11.5128691196, - "metaTIw": 43.0957920551, - "metaTVr": 0.697492361069, - "metaTVw": 3.21077013016, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 71.471419096, - "openo": 0.5601978302, - "real": 319.536077023, - "request": 124675.246468, - "sys": "yellowstone" - }, - "151005-141143": { - "TSr": 124.386564493, - "TSw": 65.0036051273, - "actual": 798360.0, - "cores": 16, - "metaTIr": 11.1683745384, - "metaTIw": 42.7969913483, - "metaTVr": 0.692959308624, - "metaTVw": 2.90459418297, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 69.9783821106, - "openo": 0.696071147919, - "real": 313.190394878, - "request": 124675.246468, - "sys": "yellowstone" - } - } - }, - "startYear": "0001" - }, - "CLM-1.0": { - "baseline": "/glade/u/tdd/asap/bakeoff/tseries/clmse-1.0", - "endYear": "0010", - "input": "/glade/u/tdd/asap/bakeoff/hist/clmse-1.0", - "isize": 8694.784, - "n2dVars": 297, - "n3dVars": 0, - "nVars": 297, - "osize": 14051.0, - "results": { - "cdo": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 2788.0, - "sys": "caldera" - } - }, - "ncl": { - "140228-142411": { - "cores": 1, - "correct": "fail", - "kernel": 45.35, - "metadata": false, - "nodes": 1, - "real": 205.48, - "sys": "geyser", - "user": 47.44 - }, - "140228-143030": { - "cores": 1, - "correct": "fail", - "kernel": 44.6, - "metadata": false, - "nodes": 1, - "real": 220.15, - "sys": "geyser", - "user": 48.45 - } - }, - "nco": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 4321.0, - "sys": "geyser" - } - }, - "ncr": { - "140226-173600": { - "cores": 4, - "correct": "pass", - 
"metadata": true, - "nodes": 1, - "real": 2713.0, - "sys": "caldera" - }, - "140312-200047": { - "TS": 1153.94, - "cores": 16, - "correct": "pass", - "metaTI": 23.35, - "metaTV": 542.14, - "metadata": true, - "nodes": 4, - "real": 1782.0, - "sys": "caldera" - } - }, - "pagoda": { - "??????-??????": { - "cores": 4, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 1789.0, - "sys": "caldera" - } - }, - "pynio": { - "140306-090318": { - "cores": 1, - "correct": "fail", - "kernel": 28.22, - "metadata": false, - "nodes": 1, - "real": 1359.24, - "sys": "geyser", - "user": 68.68 - }, - "140313-140215": { - "TS": 1235.35, - "actual": 147840.0, - "cores": 1, - "correct": "fail", - "kernel": 55.29, - "metaTI": 60.26, - "metaTV": 135.67, - "metadata": true, - "nodes": 1, - "openi": 13.75, - "openo": 1.95, - "real": 1452.57, - "request": 8832.56, - "sys": "geyser", - "user": 162.48 - } - }, - "pynio4_0": { - "140317-232111": { - "TS": 1174.86, - "actual": 1148160.0, - "cores": 1, - "correct": "fail", - "kernel": 47.37, - "metaTI": 106.8, - "metaTV": 124.61, - "metadata": true, - "nodes": 1, - "openi": 11.67, - "openo": 0.5, - "real": 1421.16, - "request": 8678.32, - "sys": "geyser", - "user": 304.33 - } - }, - "pynio4_1": { - "140318-114712": { - "TS": 1451.53, - "actual": 1148160.0, - "cores": 1, - "correct": "fail", - "kernel": 43.67, - "metaTI": 146.12, - "metaTV": 206.4, - "metadata": true, - "nodes": 1, - "openi": 11.81, - "openo": 1.66, - "real": 1820.33, - "request": 8678.32, - "sys": "geyser", - "user": 426.28 - } - }, - "pyniompi": { - "140306-091538": { - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metadata": false, - "nodes": 4, - "real": 129.54, - "sys": "yellowstone", - "user": 0.2 - }, - "140313-131051": { - "TS": 146.97, - "actual": 143520.0, - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metaTI": 1.8, - "metaTV": 3.95, - "metadata": true, - "nodes": 4, - "openi": 10.49, - "openo": 0.12, - "real": 172.44, - "request": 8676.81, 
- "sys": "yellowstone", - "user": 0.19 - }, - "140313-155000": { - "TS": 158.54, - "actual": 143520.0, - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metaTI": 1.8, - "metaTV": 5.83, - "metadata": true, - "nodes": 4, - "openi": 10.49, - "openo": 0.12, - "real": 183.07, - "request": 8676.81, - "sys": "yellowstone", - "user": 0.19 - }, - "140314-101030": { - "TS": 112.91, - "actual": 1148160.0, - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metaTI": 2.66, - "metaTV": 5.16, - "metadata": true, - "nodes": 4, - "openi": 9.67, - "openo": 0.13, - "real": 139.56, - "request": 8676.81, - "sys": "yellowstone", - "user": 0.18 - } - }, - "pyniompi4_0": { - "140318-070023": { - "TS": 156.21, - "actual": 1148160.0, - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metaTI": 6.95, - "metaTV": 6.95, - "metadata": true, - "nodes": 4, - "openi": 10.6, - "openo": 0.33, - "real": 189.24, - "request": 8676.81, - "sys": "yellowstone", - "user": 0.18 - } - }, - "pyniompi4_1": { - "140318-091904": { - "TS": 129.37, - "actual": 1148160.0, - "cores": 16, - "correct": "fail", - "kernel": 0.11, - "metaTI": 5.47, - "metaTV": 4.62, - "metadata": true, - "nodes": 4, - "openi": 9.89, - "openo": 0.2, - "real": 151.48, - "request": 8676.81, - "sys": "yellowstone", - "user": 0.18 - } - }, - "pyreshaper": { - "140624-113616": { - "TS": 362.838088274, - "actual": 1425600.0, - "cores": 16, - "metaTI": 3.64182257652, - "metaTV": 7.80311584473, - "metadata": true, - "nodes": 4, - "openi": 13.7385029793, - "openo": 0.708451747894, - "real": 427.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "140826-145351": { - "TS": 185.680801392, - "actual": 1425600.0, - "cores": 16, - "metaTI": 3.53920269012, - "metaTV": 6.57377076149, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.960504055, - "openo": 0.712677240372, - "real": 244.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "140902-142228": { - "TS": 162.275549173, - "actual": 1425600.0, - 
"cores": 16, - "metaTI": 11.8885095119, - "metaTV": 34.244243145, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 8.73659610748, - "openo": 0.758320569992, - "real": 242.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "140911-112912": { - "TS": 119.784294128, - "actual": 1425600.0, - "cores": 16, - "metaTI": 4.4601495266, - "metaTV": 7.02146315575, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 11.810557127, - "openo": 0.630494594574, - "real": 182.0, - "request": 8678.71307373, - "sys": "yellowstone" - } - }, - "pyreshaper4": { - "140624-113802": { - "TS": 384.561959028, - "actual": 1425600.0, - "cores": 16, - "metaTI": 6.64893960953, - "metaTV": 15.0808794498, - "metadata": true, - "nodes": 4, - "openi": 19.5430710316, - "openo": 0.769854307175, - "real": 465.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "140826-145351": { - "TS": 176.847893715, - "actual": 1425600.0, - "cores": 16, - "metaTI": 6.18663406372, - "metaTV": 16.3740646839, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.9581079483, - "openo": 0.189343929291, - "real": 244.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "140902-134804": { - "TS": 128.163312912, - "actual": 1425600.0, - "cores": 16, - "metaTI": 4.74730086327, - "metaTV": 12.2831828594, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 11.5698750019, - "openo": 1.13589835167, - "real": 196.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "140911-113952": { - "TS": 293.202519417, - "actual": 1425600.0, - "cores": 16, - "metaTI": 8.82940626144, - "metaTV": 19.2759275436, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 14.2772920132, - "openo": 1.35260295868, - "real": 341.0, - "request": 8678.71307373, - "sys": "yellowstone" - } - }, - "pyreshaper4c": { - "140624-113733": { - "TS": 385.772614479, - "actual": 1425600.0, - "cores": 16, - "metaTI": 7.3999941349, - "metaTV": 12.4398062229, - "metadata": true, - "nodes": 
4, - "openi": 19.5401780605, - "openo": 0.989186286926, - "real": 436.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "140826-145351": { - "TS": 175.992659807, - "actual": 1425600.0, - "cores": 16, - "metaTI": 6.88678193092, - "metaTV": 14.0521204472, - "metadata": true, - "nodes": 4, - "openi": 10.9594869614, - "openo": 0.499489068985, - "real": 244.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "140902-110102": { - "TS": 177.58859539, - "actual": 146880.0, - "cores": 16, - "metaTI": 0.88392496109, - "metaTV": 4.15271639824, - "metadata": true, - "nodes": 4, - "once": true, - "openi": 11.6738321781, - "openo": 0.889152765274, - "real": 233.0, - "request": 8676.81610107, - "sys": "yellowstone" - }, - "140902-133236": { - "TS": 163.407021046, - "actual": 1425600.0, - "cores": 16, - "metaTI": 5.86030912399, - "metaTV": 11.3345866203, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 11.1746912003, - "openo": 0.731638431549, - "real": 230.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "140902-210436": { - "TS": 158.254923105, - "actual": 1425600.0, - "cores": 16, - "metaTI": 18.5883505344, - "metaTV": 33.6075720787, - "metadata": true, - "nodes": 1, - "once": false, - "openi": 20.3818750381, - "openo": 1.33059597015, - "real": 225.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "140911-114047": { - "TS": 284.039642811, - "actual": 1425600.0, - "cores": 16, - "metaTI": 6.48289012909, - "metaTV": 12.7032730579, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 15.5982160568, - "openo": 0.793869018555, - "real": 362.0, - "request": 8678.71307373, - "sys": "yellowstone" - }, - "150918-140219": { - "TS": 242.346611023, - "actual": 1425600.0, - "cores": 16, - "metaTI": 8.48106193542, - "metaTV": 15.5442285538, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.54877281189, - "openo": 1.01668596268, - "real": 264.768231869, - "request": 8678.71307373, - "sys": "yellowstone" - }, - 
"151002-093313": { - "TS": 10.7341187, - "actual": 1443420.0, - "cores": 16, - "metaTI": 5.90268445015, - "metaTV": 10.5968191624, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.87163710594, - "openo": 0.614326238632, - "real": 204.401623964, - "request": 14019.9852104, - "sys": "yellowstone" - }, - "151002-120140": { - "TSr": 255.788938046, - "TSw": 9.34615063667, - "actual": 1443420.0, - "cores": 16, - "metaTIr": 1.61454749107, - "metaTIw": 5.90498423576, - "metaTVr": 8.47008776665, - "metaTVw": 10.5631215572, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 9.37232899666, - "openo": 0.265953063965, - "real": 287.702008009, - "request": 14019.9852104, - "sys": "yellowstone" - }, - "151002-144726": { - "TSr": 269.169423342, - "TSw": 13.1295950413, - "actual": 1443420.0, - "cores": 16, - "metaTIr": 1.14924407005, - "metaTIw": 5.88382959366, - "metaTVr": 4.54588770866, - "metaTVw": 10.5600767136, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.0409920216, - "openo": 1.07933664322, - "real": 300.97479105, - "request": 14019.9852104, - "sys": "yellowstone" - }, - "151005-133542": { - "TSr": 161.287609339, - "TSw": 9.55008554459, - "actual": 1443420.0, - "cores": 16, - "metaTIr": 1.74680042267, - "metaTIw": 5.95859718323, - "metaTVr": 1.14477300644, - "metaTVw": 10.8875396252, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 86.8880727291, - "openo": 0.510288476944, - "real": 287.98132515, - "request": 14019.9852104, - "sys": "yellowstone" - }, - "151005-141339": { - "TSr": 312.091857433, - "TSw": 9.57465410233, - "actual": 1443420.0, - "cores": 16, - "metaTIr": 3.0339858532, - "metaTIw": 5.92696642876, - "metaTVr": 1.17320466042, - "metaTVw": 10.8604056835, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 91.3150508404, - "openo": 0.4595246315, - "real": 435.240852833, - "request": 14019.9852104, - "sys": "yellowstone" - } - } - }, - "startYear": "0001" - }, - "POP-0.1": { - "baseline": 
"/glade/u/tdd/asap/bakeoff/tseries/pop-0.1", - "endYear": "0010", - "input": "/glade/u/tdd/asap/bakeoff/hist/pop-0.1", - "isize": 3030384.64, - "n2dVars": 23, - "n3dVars": 11, - "nVars": 34, - "osize": 3124562.75, - "results": { - "ncl": { - "140306-200340": { - "cores": 1, - "correct": "fail", - "kernel": 4956.16, - "metadata": false, - "nodes": 1, - "real": 30509.62, - "sys": "geyser", - "user": 12327.82 - } - }, - "nco": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 52558.0, - "sys": "geyser" - } - }, - "ncr": { - "140312-220039": { - "TS": 8365.7, - "cores": 16, - "correct": "pass", - "metaTI": 255.62, - "metaTV": 78.48, - "metadata": true, - "nodes": 4, - "real": 8745.0, - "sys": "caldera" - } - }, - "pagoda": { - "??????-??????": { - "cores": 16, - "correct": "pass", - "nodes": 4, - "real": 53051.0, - "sys": "geyser" - } - }, - "pynio": { - "140306-092824": { - "cores": 1, - "correct": "fail", - "kernel": 4047.26, - "metadata": false, - "nodes": 1, - "real": 20526.06, - "sys": "geyser", - "user": 14145.3 - }, - "140313-192244": { - "TS": 33980.16, - "actual": 3071040.0, - "cores": 1, - "correct": false, - "kernel": 6749.21, - "metaTI": 343.98, - "metaTV": 129.17, - "metadata": true, - "nodes": 1, - "openi": 10.93, - "openo": 0.1, - "real": 34465.08, - "request": 3029589.9, - "sys": "geyser", - "user": 22193.31 - } - }, - "pyniompi": { - "140314-130728": { - "TS": 4157.04, - "actual": 3071040.0, - "cores": 16, - "correct": false, - "kernel": 0.11, - "metaTI": 24.89, - "metaTV": 9.75, - "metadata": true, - "nodes": 4, - "openi": 12.99, - "openo": 0.16, - "real": 4205.33, - "request": 3029589.94, - "sys": "yellowstone", - "user": 0.23 } }, - "pyniompi4_0": { - "140318-070102": { - "TS": 6096.63, - "actual": 3071040.0, - "cores": 16, - "correct": false, - "kernel": 0.24, - "metaTI": 40.16, - "metaTV": 8.26, - "metadata": true, - "nodes": 4, - "openi": 9.97, - "openo": 0.16, - "real": 6148.8, - "request": 
3029589.94, - "sys": "yellowstone", - "user": 0.43 - } - }, - "pyniompi4_1": { - "140318-092050": { - "TS": 12840.39, - "actual": 3071040.0, - "cores": 16, - "correct": false, - "kernel": 0.24, - "metaTI": 62.78, - "metaTV": 7.74, - "metadata": true, - "nodes": 4, - "openi": 11.32, - "openo": 0.26, - "real": 12906.67, - "request": 3029589.94, - "sys": "yellowstone", - "user": 0.43 - } - }, - "pyreshaper": { - "140625-133224": { - "TS": 4188.19754171, - "actual": 3071040.0, - "cores": 16, - "metaTI": 23.1918101311, - "metaTV": 10.8097374439, - "metadata": true, - "nodes": 4, - "openi": 9.9564101696, - "openo": 0.165270090103, - "real": 4269.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "140826-190453": { - "TS": 2057.59516311, - "actual": 3071040.0, - "cores": 16, - "metaTI": 30.1707749367, - "metaTV": 9.16880011559, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 12.6679458618, - "openo": 0.262907028198, - "real": 2152.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "140902-145453": { - "TS": 2094.32604527, - "actual": 3071040.0, - "cores": 16, - "metaTI": 28.2248177528, - "metaTV": 11.6350436211, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 12.4453110695, - "openo": 0.213987827301, - "real": 2187.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "140911-120248": { - "TS": 2136.48663521, - "actual": 3071040.0, - "cores": 16, - "metaTI": 23.9372041225, - "metaTV": 11.3822102547, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 12.6650979519, - "openo": 0.218416929245, - "real": 2198.0, - "request": 3029589.93713, - "sys": "yellowstone" - } - }, - "pyreshaper4": { - "140625-111405": { - "TS": 6304.54537344, - "actual": 3071040.0, - "cores": 16, - "metaTI": 42.2605381012, - "metaTV": 12.7368450165, - "metadata": true, - "nodes": 4, - "openi": 9.24561190605, - "openo": 0.189666986465, - "real": 6366.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "140827-154423": { - "TS": 
3225.38712144, - "actual": 3071040.0, - "cores": 16, - "metaTI": 44.9510638714, - "metaTV": 10.1678600311, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.8547489643, - "openo": 0.166674852371, - "real": 3321.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "140902-143814": { - "TS": 3119.2581923, - "actual": 3071040.0, - "cores": 16, - "metaTI": 43.4275140762, - "metaTV": 10.9593930244, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 12.8385181427, - "openo": 0.313005208969, - "real": 3195.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "140911-123019": { - "TS": 3223.67403054, - "actual": 3071040.0, - "cores": 16, - "metaTI": 45.1119039059, - "metaTV": 10.2575378418, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.9783298969, - "openo": 0.621034145355, - "real": 3334.0, - "request": 3029589.93713, - "sys": "yellowstone" - } - }, - "pyreshaper4c": { - "140624-201355": { - "TS": 12475.4380803, - "actual": 3071040.0, - "cores": 16, - "metaTI": 60.3435850143, - "metaTV": 7.27081155777, - "metadata": true, - "nodes": 4, - "openi": 12.5250890255, - "openo": 0.35288977623, - "real": 12575.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "140827-164042": { - "TS": 6598.66926503, - "actual": 3071040.0, - "cores": 16, - "metaTI": 61.5178649426, - "metaTV": 9.50716114044, - "metadata": true, - "nodes": 4, - "openi": 10.8506140709, - "openo": 0.240426063538, - "real": 6700.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "140902-124555": { - "TS": 6473.02565169, - "actual": 3039360.0, - "cores": 16, - "metaTI": 20.4248332977, - "metaTV": 2.28913474083, - "metadata": true, - "nodes": 4, - "once": true, - "openi": 10.9985148907, - "openo": 0.308897256851, - "real": 6526.0, - "request": 3029589.8465, - "sys": "yellowstone" - }, - "140902-151821": { - "TS": 6474.7544241, - "actual": 3071040.0, - "cores": 16, - "metaTI": 61.1706421375, - "metaTV": 9.0585565567, - "metadata": true, - 
"nodes": 4, - "once": false, - "openi": 10.7027449608, - "openo": 0.26385307312, - "real": 6568.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "140911-132509": { - "TS": 6519.65876412, - "actual": 3071040.0, - "cores": 16, - "metaTI": 61.8785927296, - "metaTV": 9.59517264366, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.9871590137, - "openo": 0.313570022583, - "real": 6624.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "150116-213332": { - "TS": 6497.97398233, - "actual": 3071040.0, - "cores": 16, - "metaTI": 61.8820841312, - "metaTV": 9.12207603455, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 11.5999288559, - "openo": 0.207006931305, - "real": 6622.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "150116-233544": { - "TS": 6279.48014379, - "actual": 3071040.0, - "cores": 64, - "metaTI": 19.3176209927, - "metaTV": 4.49379658699, - "metadata": true, - "nodes": 16, - "once": false, - "openi": 10.7711701393, - "openo": 0.500372886658, - "real": 6354.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "150116-233546": { - "TS": 6259.78720379, - "actual": 3071040.0, - "cores": 160, - "metaTI": 19.9084851742, - "metaTV": 4.1549680233, - "metadata": true, - "nodes": 40, - "once": false, - "openi": 9.19333982468, - "openo": 0.398915052414, - "real": 6331.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "150116-233548": { - "TS": 6262.09757543, - "actual": 3071040.0, - "cores": 96, - "metaTI": 19.8878400326, - "metaTV": 4.18029904366, - "metadata": true, - "nodes": 24, - "once": false, - "openi": 10.7430889606, - "openo": 0.238453149796, - "real": 6333.0, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "150116-233801": { - "TS": 6387.83472657, - "actual": 3071040.0, - "cores": 32, - "metaTI": 43.1699090004, - "metaTV": 7.70080971718, - "metadata": true, - "nodes": 8, - "once": false, - "openi": 10.7566359043, - "openo": 0.331459999084, - "real": 6491.0, - "request": 
3029589.93713, - "sys": "yellowstone" - }, - "150918-160256": { - "TS": 7476.31867456, - "actual": 3071040.0, - "cores": 16, - "metaTI": 220.60375309, - "metaTV": 20.3197700977, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 6.91684889793, - "openo": 0.235767841339, - "real": 7504.10058808, - "request": 3029589.93713, - "sys": "yellowstone" - }, - "151002-111249": { - "TSr": 1027.95776629, - "TSw": 5291.65748668, - "actual": 3121360.0, - "cores": 16, - "metaTIr": 104.953143358, - "metaTIw": 87.6997177601, - "metaTVr": 12.7391045094, - "metaTVw": 1.22518730164, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 16.252808094, - "openo": 0.1517329216, - "real": 6153.10157013, - "request": 3073293.60839, - "sys": "yellowstone" - }, - "151002-134134": { - "TSr": 1017.69567513, - "TSw": 5369.47686791, - "actual": 3121360.0, - "cores": 16, - "metaTIr": 110.276347637, - "metaTIw": 88.3693189621, - "metaTVr": 15.7053070068, - "metaTVw": 19.1175069809, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 12.0257339478, - "openo": 0.108300924301, - "real": 6260.71265197, - "request": 3073293.60839, - "sys": "yellowstone" - }, - "151002-162705": { - "TSr": 1008.87791133, - "TSw": 5330.33792472, - "actual": 3121360.0, - "cores": 16, - "metaTIr": 107.638655424, - "metaTIw": 88.9155454636, - "metaTVr": 14.8814268112, - "metaTVw": 2.1268658638, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 11.387442112, - "openo": 0.150990962982, - "real": 6270.88925004, - "request": 3073293.60839, - "sys": "yellowstone" - }, - "151005-154933": { - "TSr": 1048.0297327, - "TSw": 5279.75957608, - "actual": 3121360.0, - "cores": 16, - "metaTIr": 86.8662247658, - "metaTIw": 88.4578080177, - "metaTVr": 0.743849277496, - "metaTVw": 0.992867469788, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 67.3424155712, - "openo": 0.199492692947, - "real": 6141.01054811, - "request": 3073293.60839, - "sys": "yellowstone" - } - } - }, - "startYear": 
"0001" - }, - "POP-1.0": { - "baseline": "/glade/u/tdd/asap/bakeoff/tseries/pop-1.0", - "endYear": "0010", - "input": "/glade/u/tdd/asap/bakeoff/hist/pop-1.0", - "isize": 192182.272, - "n2dVars": 78, - "n3dVars": 36, - "nVars": 114, - "osize": 194261.85, - "results": { - "cdo": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 10739.0, - "sys": "caldera" - } - }, - "ncl": { - "140303-095502": { - "cores": 1, - "correct": "fail", - "kernel": 771.41, - "metadata": false, - "nodes": 1, - "real": 31790.53, - "sys": "geyser", - "user": 1118.61 - } - }, - "nco": { - "??????-??????": { - "cores": 1, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 5679.0, - "sys": "geyser" - } - }, - "ncr": { - "140226-171901": { - "cores": 4, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 1623.0, - "sys": "caldera" - }, - "140312-202524": { - "TS": 1288.62, - "cores": 16, - "correct": "pass", - "metaTI": 14.2, - "metaTV": 116.77, - "metadata": true, - "nodes": 4, - "real": 1465.0, - "sys": "caldera" - } - }, - "pagoda": { - "??????-??????": { - "cores": 4, - "correct": "pass", - "metadata": true, - "nodes": 1, - "real": 5513.0, - "sys": "caldera" - } - }, - "pynio": { - "140306-104238": { - "cores": 1, - "correct": "fail", - "kernel": 183.7, - "metadata": false, - "nodes": 1, - "real": 6890.64, - "sys": "geyser", - "user": 1043.99 - }, - "140307-094628": { - "TS": 4436.59, - "actual": 242880.0, - "cores": 1, - "correct": "fail", - "kernel": 311.76, - "metaTI": 422.84, - "metaTV": 260.98, - "metadata": true, - "nodes": 1, - "openi": 9.83, - "openo": 1.07, - "real": 5159.22, - "request": 192163.54, - "sys": "geyser", - "user": 1409.84 - } - }, - "pynio4_0": { - "140317-232141": { - "TS": 7754.38, - "actual": 352320.0, - "cores": 1, - "correct": "fail", - "kernel": 298.58, - "metaTI": 88.15, - "metaTV": 177.24, - "metadata": true, - "nodes": 1, - "openi": 9.75, - "openo": 0.17, - "real": 8032.64, - 
"request": 192163.54, - "sys": "geyser", - "user": 3312.31 - } - }, - "pyniompi": { - "140305-211128": { - "cores": 16, - "correct": "fail", - "kernel": 0.1, - "metadata": false, - "nodes": 4, - "real": 729.3, - "sys": "yellowstone", - "user": 0.23 - }, - "140313-155030": { - "TS": 1103.49, - "actual": 242880.0, - "cores": 16, - "correct": "fail", - "kernel": 0.13, - "metaTI": 7.21, - "metaTV": 9.13, - "metadata": true, - "nodes": 4, - "openi": 12.52, - "openo": 0.09, - "real": 1141.67, - "request": 192163.54, - "sys": "yellowstone", - "user": 0.19 - } - }, - "pyniompi4_0": { - "140318-070023": { - "TS": 1087.52, - "actual": 352320.0, - "cores": 16, - "correct": "fail", - "kernel": 0.23, - "metaTI": 5.21, - "metaTV": 11.24, - "metadata": true, - "nodes": 4, - "openi": 9.47, - "openo": 0.09, - "real": 1122.78, - "request": 192163.54, - "sys": "yellowstone", - "user": 0.17 - } - }, - "pyniompi4_1": { - "140318-092050": { - "TS": 1487.0, - "actual": 352320.0, - "cores": 16, - "correct": "fail", - "kernel": 0.23, - "metaTI": 8.59, - "metaTV": 13.03, - "metadata": true, - "nodes": 4, - "openi": 9.47, - "openo": 0.26, - "real": 1525.21, - "request": 192163.54, - "sys": "yellowstone", - "user": 0.17 - } - }, - "pyreshaper": { - "140624-115036": { - "TS": 1115.6568172, - "actual": 352320.0, - "cores": 16, - "metaTI": 22.2814490795, - "metaTV": 52.5229613781, - "metadata": true, - "nodes": 4, - "openi": 23.7155740261, - "openo": 0.268895864487, - "real": 1213.0, - "request": 192163.858337, - "sys": "yellowstone" - }, - "140826-150055": { - "TS": 642.152941704, - "actual": 352320.0, - "cores": 16, - "metaTI": 8.12098765373, - "metaTV": 17.7993154526, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 10.1301939487, - "openo": 0.259157657623, - "real": 668.0, - "request": 192163.858337, - "sys": "yellowstone" - }, - "140902-143159": { - "TS": 792.752401829, - "actual": 352320.0, - "cores": 16, - "metaTI": 6.48322582245, - "metaTV": 19.5578858852, - "metadata": 
true, - "nodes": 4, - "once": false, - "openi": 7.57899308205, - "openo": 0.230998516083, - "real": 813.0, - "request": 192163.858337, - "sys": "yellowstone" - }, - "140911-114206": { - "TS": 873.247316599, - "actual": 352320.0, + "pyreshaper4c-v0": { + "151005-141033": { + "TSr": 179.972988367, + "TSw": 7.15631198883, + "actual": 177840.0, "cores": 16, - "metaTI": 11.7036771774, - "metaTV": 32.4409461021, + "metaTIr": 1.48803186417, + "metaTIw": 1.63100767136, + "metaTVr": 0.086678981781, + "metaTVw": 0.353240013123, "metadata": true, "nodes": 4, "once": false, - "openi": 11.7263197899, - "openo": 0.302983999252, - "real": 956.0, - "request": 192163.858337, + "openi": 19.2583482265, + "openo": 0.235752820969, + "real": 248.850775957, + "request": 8336.41067505, "sys": "yellowstone" } + } + }, + "startYear": "0001" + }, + "CLM-0.25": { + "baseline": "/glade/p/tdd/asap/bakeoff/tseries/clmse-0.25", + "endYear": "0010", + "input": "/glade/p/tdd/asap/bakeoff/hist/clmse-0.25", + "isize": 81802.24, + "n2dVars": 150, + "nVars": 150, + "osize": 124702.8, + "results": { + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 4141.0, + "sys": "geyser" + } }, - "pyreshaper4": { - "140624-115036": { - "TS": 1145.50996685, - "actual": 352320.0, + "pyreshaper4c": { + "170103-125132": { + "TSr": 145.747853279, + "TSw": 92.9539027214, + "actual": 755160.0, "cores": 16, - "metaTI": 20.7704982758, - "metaTV": 45.629805088, + "metaTIr": 3.08102965355, + "metaTIw": 2.45048451424, + "metaTVr": 49.6966121197, + "metaTVw": 3.81450819969, "metadata": true, - "nodes": 4, - "openi": 17.5859360695, - "openo": 0.32540512085, - "real": 1219.0, - "request": 192163.858337, + "nodes": 1, + "once": false, + "openi": 52.782582283, + "openo": 0.524458169937, + "real": 358.438014984, + "request": 84629.9774551, "sys": "yellowstone" }, - "140826-150056": { - "TS": 644.412581205, - "actual": 352320.0, + "170104-101711": { + "TSr": 137.892370939, + 
"TSw": 89.561924696, + "actual": 755160.0, "cores": 16, - "metaTI": 8.02404499054, - "metaTV": 16.4856157303, + "metaTIr": 2.46095728874, + "metaTIw": 1.8906545639, + "metaTVr": 47.8124940395, + "metaTVw": 3.76889061928, "metadata": true, "nodes": 4, "once": false, - "openi": 10.1310958862, - "openo": 0.290377378464, - "real": 669.0, - "request": 192163.858337, + "openi": 46.272790432, + "openo": 0.326344966888, + "real": 342.894929886, + "request": 84629.9774551, "sys": "yellowstone" }, - "140902-135750": { - "TS": 686.631240129, - "actual": 352320.0, - "cores": 16, - "metaTI": 8.02413105965, - "metaTV": 13.8782639503, + "170105-153542": { + "TSr": 33.5559618473, + "TSw": 66.9288029671, + "actual": 755160.0, + "cores": 128, + "metaTIr": 0.473731279373, + "metaTIw": 0.370635986328, + "metaTVr": 7.51144719124, + "metaTVw": 0.590987205505, "metadata": true, - "nodes": 4, + "nodes": 8, "once": false, - "openi": 10.1490437984, - "openo": 0.420978069305, - "real": 753.0, - "request": 192163.858337, + "openi": 9.45170307159, + "openo": 2.42541074753, + "real": 120.56876111, + "request": 84629.9774551, "sys": "yellowstone" - }, - "140911-115129": { - "TS": 915.880971432, - "actual": 352320.0, + } + }, + "pyreshaper4c-v0": { + "151005-141143": { + "TSr": 124.386564493, + "TSw": 65.0036051273, + "actual": 798360.0, "cores": 16, - "metaTI": 15.7554838657, - "metaTV": 37.2950565815, + "metaTIr": 11.1683745384, + "metaTIw": 42.7969913483, + "metaTVr": 0.692959308624, + "metaTVw": 2.90459418297, "metadata": true, "nodes": 4, "once": false, - "openi": 15.9514939785, - "openo": 0.687720775604, - "real": 1004.0, - "request": 192163.858337, + "openi": 69.9783821106, + "openo": 0.696071147919, + "real": 313.190394878, + "request": 124675.246468, "sys": "yellowstone" } + } + }, + "startYear": "0001" + }, + "CLM-1.0": { + "baseline": "/glade/p/tdd/asap/bakeoff/tseries/clmse-1.0", + "endYear": "0010", + "input": "/glade/p/tdd/asap/bakeoff/hist/clmse-1.0", + "isize": 8694.784, + 
"n2dVars": 297, + "n3dVars": 0, + "nVars": 297, + "osize": 14051.0, + "results": { + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 4321.0, + "sys": "geyser" + } }, "pyreshaper4c": { - "140624-115037": { - "TS": 1236.12181854, - "actual": 352320.0, + "170103-122251": { + "TSr": 98.7139286995, + "TSw": 10.8411934376, + "actual": 1436292.0, "cores": 16, - "metaTI": 11.7521572113, - "metaTV": 29.1930208206, + "metaTIr": 0.246340513229, + "metaTIw": 0.241369485855, + "metaTVr": 7.10129094124, + "metaTVw": 6.31414985657, "metadata": true, - "nodes": 4, - "openi": 9.35915207863, - "openo": 0.2045814991, - "real": 1299.0, - "request": 192163.858337, + "nodes": 1, + "once": false, + "openi": 94.6719768047, + "openo": 0.714631795883, + "real": 264.54977417, + "request": 9064.19198227, "sys": "yellowstone" }, - "140826-150125": { - "TS": 644.170685768, - "actual": 352320.0, + "170104-100756": { + "TSr": 91.9671385288, + "TSw": 10.4563491344, + "actual": 1436292.0, "cores": 16, - "metaTI": 7.95990419388, - "metaTV": 11.3562419415, + "metaTIr": 0.229573965073, + "metaTIw": 0.176153182983, + "metaTVr": 7.06545376778, + "metaTVw": 6.32949018478, "metadata": true, "nodes": 4, - "openi": 10.1307721138, - "openo": 0.503438711166, - "real": 698.0, - "request": 192163.858337, + "once": false, + "openi": 87.7128083706, + "openo": 1.07958054543, + "real": 252.360585928, + "request": 9064.19198227, "sys": "yellowstone" }, - "140902-111632": { - "TS": 1113.32402444, - "actual": 243840.0, - "cores": 16, - "metaTI": 1.50622916222, - "metaTV": 2.53401875496, + "170105-153417": { + "TSr": 13.275077343, + "TSw": 4.88372540474, + "actual": 1436292.0, + "cores": 128, + "metaTIr": 0.033305644989, + "metaTIw": 0.025661945343, + "metaTVr": 1.03993630409, + "metaTVw": 0.823083162308, "metadata": true, - "nodes": 4, - "once": true, - "openi": 7.68989086151, - "openo": 0.324621915817, - "real": 1163.0, - "request": 192163.547974, + 
"nodes": 8, + "once": false, + "openi": 14.0570809841, + "openo": 1.68021798134, + "real": 39.9249811172, + "request": 9064.19198227, "sys": "yellowstone" - }, - "140902-134311": { - "TS": 802.531996727, - "actual": 352320.0, + } + }, + "pyreshaper4c-v0": { + "151005-141339": { + "TSr": 312.091857433, + "TSw": 9.57465410233, + "actual": 1443420.0, "cores": 16, - "metaTI": 9.37750196457, - "metaTV": 18.0378842354, + "metaTIr": 3.0339858532, + "metaTIw": 5.92696642876, + "metaTVr": 1.17320466042, + "metaTVw": 10.8604056835, "metadata": true, "nodes": 4, "once": false, - "openi": 9.04029607773, - "openo": 0.221019029617, - "real": 858.0, - "request": 192163.858337, + "openi": 91.3150508404, + "openo": 0.4595246315, + "real": 435.240852833, + "request": 14019.9852104, "sys": "yellowstone" - }, - "140902-212043": { - "TS": 1147.33637905, - "actual": 352320.0, + } + } + }, + "startYear": "0001" + }, + "POP-0.1": { + "baseline": "/glade/p/tdd/asap/bakeoff/tseries/pop-0.1", + "endYear": "0010", + "input": "/glade/p/tdd/asap/bakeoff/hist/pop-0.1", + "isize": 3030384.64, + "n2dVars": 23, + "n3dVars": 11, + "nVars": 34, + "osize": 3124562.75, + "results": { + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 52558.0, + "sys": "geyser" + } + }, + "pyreshaper4c": { + "170103-174340": { + "TSr": 1442.92593026, + "TSw": 5308.8416512, + "actual": 3133696.0, "cores": 16, - "metaTI": 16.6726238728, - "metaTV": 35.6920032501, + "metaTIr": 29.4389734268, + "metaTIw": 83.7112417221, + "metaTVr": 20.6144771576, + "metaTVw": 0.534180402756, "metadata": true, "nodes": 1, "once": false, - "openi": 17.8658120632, - "openo": 1.45822024345, - "real": 1192.0, - "request": 192163.858337, + "openi": 27.4318027496, + "openo": 0.564414024353, + "real": 6403.45959306, + "request": 3073293.60839, "sys": "yellowstone" }, - "140911-115433": { - "TS": 1125.10803962, - "actual": 352320.0, + "170105-134458": { + "TSr": 1368.83460212, + "TSw": 
5157.85125303, + "actual": 3133696.0, "cores": 16, - "metaTI": 15.5659887791, - "metaTV": 24.7654159069, + "metaTIr": 27.2387111187, + "metaTIw": 82.7989630699, + "metaTVr": 18.3602511883, + "metaTVw": 0.466962575912, "metadata": true, "nodes": 4, "once": false, - "openi": 15.9480159283, - "openo": 0.308713436127, - "real": 1188.0, - "request": 192163.858337, + "openi": 77.5593559742, + "openo": 0.12478017807, + "real": 6185.68609095, + "request": 3073293.60839, "sys": "yellowstone" }, - "150918-141526": { - "TS": 1033.53211641, - "actual": 352320.0, - "cores": 16, - "metaTI": 13.9353010654, - "metaTV": 26.8450388908, + "170105-171755": { + "TSr": 1389.7871573, + "TSw": 5204.57347322, + "actual": 3133696.0, + "cores": 128, + "metaTIr": 8.90481829643, + "metaTIw": 14.0048742294, + "metaTVr": 4.62982535362, + "metaTVw": 0.0908977985382, "metadata": true, - "nodes": 4, + "nodes": 8, "once": false, - "openi": 9.45551896095, - "openo": 0.459881305695, - "real": 1052.35365105, - "request": 192163.858337, + "openi": 5.2437980175, + "openo": 0.519630908966, + "real": 6253.32032204, + "request": 3073293.60839, "sys": "yellowstone" - }, - "151002-094524": { - "TS": 341.83474493, - "actual": 381048.0, + } + }, + "pyreshaper4c-v0": { + "151005-154933": { + "TSr": 1048.0297327, + "TSw": 5279.75957608, + "actual": 3121360.0, "cores": 16, - "metaTI": 4.18152046204, - "metaTV": 1.73612046242, + "metaTIr": 86.8662247658, + "metaTIw": 88.4578080177, + "metaTVr": 0.743849277496, + "metaTVw": 0.992867469788, "metadata": true, "nodes": 4, "once": false, - "openi": 14.1330201626, - "openo": 0.300295114517, - "real": 908.396248817, - "request": 194248.586632, + "openi": 67.3424155712, + "openo": 0.199492692947, + "real": 6141.01054811, + "request": 3073293.60839, "sys": "yellowstone" - }, - "151002-121349": { - "TSr": 732.107361078, - "TSw": 340.210021734, + } + } + }, + "startYear": "0001" + }, + "POP-1.0": { + "baseline": "/glade/p/tdd/asap/bakeoff/tseries/pop-1.0", + "endYear": 
"0010", + "input": "/glade/p/tdd/asap/bakeoff/hist/pop-1.0", + "isize": 192182.272, + "n2dVars": 78, + "n3dVars": 36, + "nVars": 114, + "osize": 194261.85, + "results": { + "nco": { + "??????-??????": { + "cores": 1, + "correct": "pass", + "metadata": true, + "nodes": 1, + "real": 5679.0, + "sys": "geyser" + } + }, + "pyreshaper4c": { + "170103-122623": { + "TSr": 116.757568121, + "TSw": 338.39516592, "actual": 381048.0, "cores": 16, - "metaTIr": 4.61803150177, - "metaTIw": 4.13230228424, - "metaTVr": 16.138954401, - "metaTVw": 1.73543787003, + "metaTIr": 0.762569904327, + "metaTIw": 0.212284803391, + "metaTVr": 25.1479203701, + "metaTVw": 0.907991170883, "metadata": true, - "nodes": 4, + "nodes": 1, "once": false, - "openi": 10.0871970654, - "openo": 0.285751581192, - "real": 1015.06130505, + "openi": 39.6051537991, + "openo": 0.481953144073, + "real": 475.79409194, "request": 194248.586632, "sys": "yellowstone" }, - "151002-145908": { - "TSr": 727.685482979, - "TSw": 344.882488966, + "170104-101131": { + "TSr": 101.284484625, + "TSw": 336.792274475, "actual": 381048.0, "cores": 16, - "metaTIr": 4.81376791, - "metaTIw": 5.70712137222, - "metaTVr": 16.7337415218, - "metaTVw": 3.95496964455, + "metaTIr": 0.745910167694, + "metaTIw": 0.202309846878, + "metaTVr": 30.0527272224, + "metaTVw": 0.86515212059, "metadata": true, "nodes": 4, "once": false, - "openi": 12.9056642056, - "openo": 0.517478704453, - "real": 991.616967916, + "openi": 44.797388792, + "openo": 0.106647253036, + "real": 466.590247869, "request": 194248.586632, "sys": "yellowstone" }, - "151005-134536": { - "TSr": 347.185287476, - "TSw": 341.016569376, + "170105-153608": { + "TSr": 32.7535636425, + "TSw": 98.8102724552, "actual": 381048.0, - "cores": 16, - "metaTIr": 7.16235041618, - "metaTIw": 4.25477218628, - "metaTVr": 0.17556476593, - "metaTVw": 1.78972244263, + "cores": 128, + "metaTIr": 0.0864517688751, + "metaTIw": 0.0497231483459, + "metaTVr": 3.53658008575, + "metaTVw": 0.0925180912018, 
"metadata": true, - "nodes": 4, + "nodes": 8, "once": false, - "openi": 51.1564006805, - "openo": 0.26961684227, - "real": 678.237536907, + "openi": 5.75913763046, + "openo": 0.528867959976, + "real": 149.96667695, "request": 194248.586632, "sys": "yellowstone" - }, + } + }, + "pyreshaper4c-v0": { "151005-141834": { "TSr": 394.218106985, "TSw": 342.007935047, @@ -5260,185 +969,7 @@ "nVars": 4, "osize": 6963.2, "results": { - "pyreshaper": { - "140911-111816": { - "TS": 202.800698996, - "actual": 350304.0, - "cores": 16, - "metaTI": 0.725324869156, - "metaTV": 82.8588526249, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 83.4460978508, - "openo": 0.0272371768951, - "real": 398.0, - "request": 208227.04184, - "sys": "yellowstone" - }, - "140911-114043": { - "TS": 384.075725555, - "actual": 350304.0, - "cores": 16, - "metaTI": 1.36809396744, - "metaTV": 219.417865038, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 130.947030067, - "openo": 0.145795822144, - "real": 745.0, - "request": 208227.04184, - "sys": "yellowstone" - } - }, - "pyreshaper4": { - "140911-111815": { - "TS": 213.01793766, - "actual": 350304.0, - "cores": 16, - "metaTI": 0.641710042953, - "metaTV": 73.404512167, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 66.3904781342, - "openo": 0.183240890503, - "real": 376.0, - "request": 208227.04184, - "sys": "yellowstone" - }, - "140911-114834": { - "TS": 334.502142906, - "actual": 350304.0, - "cores": 16, - "metaTI": 2.75354886055, - "metaTV": 189.406442404, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 286.188698053, - "openo": 0.625742197037, - "real": 829.0, - "request": 208227.04184, - "sys": "yellowstone" - } - }, - "pyreshaper4c": { - "140911-111815": { - "TS": 241.960571527, - "actual": 350304.0, - "cores": 16, - "metaTI": 0.735607147217, - "metaTV": 43.8486521244, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 65.7884709835, - "openo": 0.218874931335, - "real": 376.0, - 
"request": 208227.04184, - "sys": "yellowstone" - }, - "140911-114804": { - "TS": 364.382410526, - "actual": 350304.0, - "cores": 16, - "metaTI": 3.3417840004, - "metaTV": 155.952032328, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 286.188265085, - "openo": 0.610445976257, - "real": 799.0, - "request": 208227.04184, - "sys": "yellowstone" - }, - "150918-140809": { - "TS": 344.406491041, - "actual": 315360.0, - "cores": 16, - "metaTI": 2.29843902588, - "metaTV": 142.575973034, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 165.554438829, - "openo": 0.451182126999, - "real": 484.280472994, - "request": 187507.280182, - "sys": "yellowstone" - }, - "151002-094206": { - "TS": 44.544624567, - "actual": 158608.0, - "cores": 16, - "metaTI": 0.416500329971, - "metaTV": 4.26540613174, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 125.451020956, - "openo": 0.415581941605, - "real": 293.116672993, - "request": 6232.80738831, - "sys": "yellowstone" - }, - "151002-120613": { - "TSr": 288.929755926, - "TSw": 45.5107212067, - "actual": 158608.0, - "cores": 16, - "metaTIr": 1.99480342865, - "metaTIw": 0.461429834366, - "metaTVr": 125.482697725, - "metaTVw": 4.67854905128, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 208.385211945, - "openo": 0.543105840683, - "real": 341.90487504, - "request": 6232.80738831, - "sys": "yellowstone" - }, - "151002-145228": { - "TSr": 222.422555923, - "TSw": 42.7405705452, - "actual": 11296.0, - "cores": 16, - "metaTIr": 1.67179584503, - "metaTIw": 0.42426276207, - "metaTVr": 131.151450157, - "metaTVw": 1.62547826767, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 210.754734993, - "openo": 0.479517936707, - "real": 392.414848089, - "request": 6232.80738831, - "sys": "yellowstone" - }, - "151005-134516": { - "TSr": 272.211297989, - "TSw": 40.9105670452, - "actual": 11296.0, - "cores": 16, - "metaTIr": 2.31458997726, - "metaTIw": 0.411843299866, - "metaTVr": 0.16576385498, - 
"metaTVw": 0.361858606339, - "metadata": true, - "nodes": 4, - "once": false, - "openi": 166.480087757, - "openo": 0.669489860535, - "real": 479.004032135, - "request": 6232.80738831, - "sys": "yellowstone" - }, + "pyreshaper4c-v0": { "151005-141854": { "TSr": 248.436975718, "TSw": 43.21550107, diff --git a/tests/yellowstone/utilities/plottools.py b/tests/yellowstone/utilities/plottools.py index 139ee9f1..bfb28e58 100755 --- a/tests/yellowstone/utilities/plottools.py +++ b/tests/yellowstone/utilities/plottools.py @@ -347,12 +347,30 @@ def get_duration_pdata(data_dict): plot_dict[dataset] = {} for method in data_dict[dataset][__RESULTS__]: plot_dict[dataset][method] = [] - for job_data in data_dict[dataset][__RESULTS__][method].values(): + for job_id in sorted(data_dict[dataset][__RESULTS__][method]): + job_data = data_dict[dataset][__RESULTS__][method][job_id] real_time = float(job_data['real']) / 60.0 plot_dict[dataset][method].append(real_time) return plot_dict +#============================================================================== +# Gather speedup data from a data dictionary and return a plot dictionary +#============================================================================== +def get_speedup_pdata(plot_dict, over_method): + to_remove = [] + for dataset in plot_dict: + if over_method in plot_dict[dataset]: + over_time = plot_dict[dataset].pop(over_method) + for method in plot_dict[dataset]: + plot_dict[dataset][method] *= 1.0/over_time + else: + to_remove.append(dataset) + for dataset in to_remove: + plot_dict.pop(dataset) + return plot_dict + + #============================================================================== # Gather throughput data from a data dictionary and return a plot dictionary #============================================================================== @@ -364,7 +382,8 @@ def get_throughput_pdata(data_dict): plot_dict[dataset] = {} for method in data_dict[dataset][__RESULTS__]: plot_dict[dataset][method] = [] - for 
job_data in data_dict[dataset][__RESULTS__][method].values(): + for job_id in sorted(data_dict[dataset][__RESULTS__][method]): + job_data = data_dict[dataset][__RESULTS__][method][job_id] throughput = isize / float(job_data['real']) plot_dict[dataset][method].append(throughput) return plot_dict @@ -376,8 +395,9 @@ def get_throughput_pdata(data_dict): def reduce_pdata(plot_dict, func=numpy.average): for dataset in plot_dict: for method in plot_dict[dataset]: - reduced_data = func(plot_dict[dataset][method]) - plot_dict[dataset][method] = reduced_data + if isinstance(plot_dict[dataset][method], (list,tuple)): + reduced_data = func(plot_dict[dataset][method]) + plot_dict[dataset][method] = reduced_data return plot_dict @@ -388,19 +408,16 @@ def make_bar_plot(pdata, filename, dataset_order, method_order, method_colors, dataset_labels, method_labels, title=None, xlabel=None, ylabel=None, - figsize=(4, 3), - figadjustments={ - 'left': 0.175, 'right': 0.98, 'top': 0.915, 'bottom': 0.275}, + figsize=(8, 6), + figadjustments={'left': 0.175, 'right': 0.98, 'top': 0.915, 'bottom': 0.275}, labelrotation=35, titlefontsize=12, labelfontsize=10, tickfontsize=9, legendfontsize=8, + logplot=False, figformat='pdf'): - # Reduce the data first (if already reduced, does nothing) - pdata = reduce_pdata(pdata) - # Get the names of the datasets and the methods dataset_names = set(pdata.keys()) method_names = set() @@ -409,19 +426,19 @@ def make_bar_plot(pdata, filename, method_names.update(new_set) # Check that the order and colors lists contain enough names - if not set(dataset_order).issubset(dataset_names): + if not set(dataset_order).issuperset(dataset_names): raise ValueError( 'Dataset order must contain all dataset names found in the plot dictionary') - if not set(dataset_labels).issubset(dataset_names): + if not set(dataset_labels).issuperset(dataset_names): raise ValueError( 'Dataset labels must contain all dataset names found in the plot dictionary') - if not 
set(method_order).issubset(method_names): + if not set(method_order).issuperset(method_names): raise ValueError( 'Method order must contain all method names found in the plot dictionary') - if not set(method_colors).issubset(method_names): + if not set(method_colors).issuperset(method_names): raise ValueError( 'Method colors must contain all method names found in the plot dictionary') - if not set(method_labels).issubset(method_names): + if not set(method_labels).issuperset(method_names): raise ValueError( 'Method labels must contain all method names found in the plot dictionary') @@ -439,7 +456,7 @@ def make_bar_plot(pdata, filename, plt.figure(figsize=figsize) plt.subplots_adjust(**figadjustments) - # Plot every method and dataset in the plot dictionary + axes = {'xmin': [], 'xmax': [], 'ymin': [], 'ymax': []} for method in method_order: if method in method_names: yvalues = numpy.zeros(n_datasets) @@ -454,8 +471,17 @@ def make_bar_plot(pdata, filename, clr = method_colors[str(method)] lab = method_labels[str(method)] - plt.bar(xvalues, yvalues, width, color=clr, label=lab) + plt.bar(xvalues, yvalues, width, color=clr, label=lab, log=logplot) offset += width + + for nm,vl in zip(['xmin', 'xmax', 'ymin', 'ymax'], plt.axis()): + axes[nm].append(vl) + + axes['xmin'] = min(axes['xmin']) + axes['xmax'] = max(axes['xmax']) + axes['ymin'] = min(axes['ymin']) + axes['ymax'] = max(axes['ymax']) + new_axes = [axes[nm] for nm in ['xmin', 'xmax', 'ymin', 'ymax']] # Label the x-axis values xlabels = [] @@ -473,4 +499,5 @@ def make_bar_plot(pdata, filename, plt.xticks(xbase, xlabels, rotation=labelrotation, ha='right', fontsize=tickfontsize) plt.legend(loc=2, fontsize=legendfontsize) + plt.axis(new_axes) plt.savefig(filename, format=figformat) diff --git a/tests/yellowstone/utilities/testtools.py b/tests/yellowstone/utilities/testtools.py index 7f054078..0e8b6614 100644 --- a/tests/yellowstone/utilities/testtools.py +++ b/tests/yellowstone/utilities/testtools.py @@ -80,8 +80,7 
@@ def __init__(self, name='testinfo.json'): self._database = dict(json.load(dbfile)) dbfile.close() except: - err_msg = 'Problem reading and parsing test info file: {0!s}'.format( - abs_path) + err_msg = 'Problem reading and parsing test info file: {0!s}'.format(abs_path) raise ValueError(err_msg) def getdb(self): @@ -195,8 +194,7 @@ def __init__(self, name=None): self._statistics = dict(json.load(stfile)) stfile.close() except: - err_msg = 'Problem reading and parsing test stats file: {0!s}'.format( - abs_path) + err_msg = 'Problem reading and parsing test stats file: {0!s}'.format(abs_path) raise ValueError(err_msg) def getdb(self): @@ -531,7 +529,7 @@ def save(self, name="teststats.json"): #============================================================================== class TimeDB(object): - def __init__(self, name=None): + def __init__(self, name='timings.json'): """ Initializer @@ -543,20 +541,17 @@ def __init__(self, name=None): ValueError: If the timing database file cannot be opened and/or read. """ - # See if there is a user-defined testinfo file, - # otherwise look for default - abs_path = os.path.abspath('timings.json') + # See if there is a user-defined timings file, otherwise look for default + abs_path = os.path.abspath(name) # Try opening and reading the testinfo file - self._timings = {} try: dbfile = open(abs_path, 'r') self._timings = dict(json.load(dbfile)) dbfile.close() except: - err_msg = 'Problem reading and parsing timings file: {0!s}'.format( - abs_path) - raise ValueError(err_msg) + print 'Timings file does not exist. Creating a new timings database.' 
+ self._timings = {} def getdb(self): """ From bc9def784ff4966d46d6690b30cb211f43f9cd64 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Tue, 25 Apr 2017 11:52:47 -0600 Subject: [PATCH 22/27] update cheyenne to use mpi4py/2.0.0-mpt native SGI mpt --- Machines/cheyenne_modules | 7 ++++--- Machines/machine_postprocess.xml | 6 ++---- Templates/batch_cheyenne.tmpl | 5 ----- 3 files changed, 6 insertions(+), 12 deletions(-) diff --git a/Machines/cheyenne_modules b/Machines/cheyenne_modules index af4b0c83..fdcfbb85 100755 --- a/Machines/cheyenne_modules +++ b/Machines/cheyenne_modules @@ -8,12 +8,13 @@ module restore system module load python/2.7.13 module load numpy/1.12.0 module load scipy/0.18.1 -module load impi/5.1.3.210 -module load mpi4py/2.0.0-impi +##module load impi/5.1.3.210 +##module load mpi4py/2.0.0-impi +module load mpi4py/2.0.0-mpt module load netcdf/4.4.1.1 module load pynio/1.4.1 module load matplotlib/2.0.0 -module load intel/16.0.3 +##module load intel/16.0.3 module load nco/4.6.2 module load ncl/6.3.0 module load netcdf4-python/1.2.7 diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 40319e09..ec3a4db0 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -59,7 +59,7 @@ 72 - mpirun -n {{ pes }} + mpiexec_mpt dplace -s 1 f2py @@ -77,11 +77,9 @@ module load python/2.7.13 module load numpy/1.12.0 module load scipy/0.18.1 - module load impi/5.1.3.210 - module load mpi4py/2.0.0-impi + module load mpi4py/2.0.0-mpt module load pynio/1.4.1 module load matplotlib/2.0.0 - module load intel/16.0.3 module load netcdf/4.4.1.1 module load nco/4.6.2 module load netcdf4-python/1.2.7 diff --git a/Templates/batch_cheyenne.tmpl b/Templates/batch_cheyenne.tmpl index 57f8bef6..475412d9 100644 --- a/Templates/batch_cheyenne.tmpl +++ b/Templates/batch_cheyenne.tmpl @@ -5,8 +5,6 @@ ## ## Averages: ## -## TODO - add some hints for cheyenne queues -## ## For ocean hi-resolution or atm data sets with a lot 
of variables, ## set the netcdf_format XML variable to netcdfLarge, change the queue to ## either geyser (shared) or bigmem (exclusive). For geyser, set -n to 16 @@ -18,7 +16,6 @@ ## ## Diagnostics: ## -## TODO - add some hints for cheyenne queues ## NOTE - geyser and caldera aren't accessible from cheyenne yet as of 1/31/2017. ## ## For diagnostics, the queue should always be set to small, geyser or caldera @@ -30,8 +27,6 @@ ## ## Variable Time series generation: ## -## TODO - add some hints for cheyenne queues -## ## Load balance depends on number of history streams, ## variables per stream and chunk sizes. ## From 5d66f0bcddbead301e3cbecf02963940d08e56c6 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Tue, 25 Apr 2017 13:55:56 -0600 Subject: [PATCH 23/27] remove commented out modules for cheyenne --- Machines/cheyenne_modules | 3 --- 1 file changed, 3 deletions(-) diff --git a/Machines/cheyenne_modules b/Machines/cheyenne_modules index fdcfbb85..d4776ba1 100755 --- a/Machines/cheyenne_modules +++ b/Machines/cheyenne_modules @@ -8,13 +8,10 @@ module restore system module load python/2.7.13 module load numpy/1.12.0 module load scipy/0.18.1 -##module load impi/5.1.3.210 -##module load mpi4py/2.0.0-impi module load mpi4py/2.0.0-mpt module load netcdf/4.4.1.1 module load pynio/1.4.1 module load matplotlib/2.0.0 -##module load intel/16.0.3 module load nco/4.6.2 module load ncl/6.3.0 module load netcdf4-python/1.2.7 From fb15129ba92a1294e5ea0d48658ca02d62a25e7d Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Wed, 26 Apr 2017 17:13:46 -0600 Subject: [PATCH 24/27] initial commit of land-ice diagnostics addition to the land diagnostics set --- .../lnd/Config/config_diags_lnd.xml | 24 + diagnostics/diagnostics/lnd/Plots/__init__.py | 2 +- .../lnd/Plots/lnd_diags_plot_factory.py | 12 +- .../diagnostics/lnd/Plots/set_10_lnd.py | 41 + .../diagnostics/lnd/Plots/set_10_seas_lnd.py | 41 + .../diagnostics/lnd/Plots/set_11_lnd.py | 41 + 
.../diagnostics/lnd/Plots/set_11_seas_lnd.py | 41 + .../diagnostics/lnd/Plots/set_12_lnd.py | 41 + diagnostics/diagnostics/lnd/model_vs_model.py | 2 +- diagnostics/diagnostics/lnd/model_vs_obs.py | 2 +- examples/CESM2.0-PI-Control/env_diags_lnd.xml | 15 +- lnd_diag/inputFiles/set10_clm.txt | 34 + lnd_diag/inputFiles/set11_clm.txt | 34 + lnd_diag/inputFiles/set2_clm.txt | 2 +- lnd_diag/inputFiles/variable_master4.3.ncl | 146 ++ lnd_diag/model-obs/set_10.ncl | 917 ++++++++++ lnd_diag/model-obs/set_11.ncl | 917 ++++++++++ lnd_diag/model-obs/set_12.ncl | 1452 ++++++++++++++++ lnd_diag/model1-model2/set_10.ncl | 1463 ++++++++++++++++ lnd_diag/model1-model2/set_10_seas.ncl | 1437 ++++++++++++++++ lnd_diag/model1-model2/set_11.ncl | 1463 ++++++++++++++++ lnd_diag/model1-model2/set_11_seas.ncl | 1437 ++++++++++++++++ lnd_diag/model1-model2/set_12.ncl | 1513 +++++++++++++++++ lnd_diag/shared/create_var_lists.csh | 4 + lnd_diag/shared/lnd_create_webpage.pl | 545 +++++- lnd_diag/shared/lnd_func.ncl | 190 +++ 26 files changed, 11805 insertions(+), 11 deletions(-) create mode 100644 diagnostics/diagnostics/lnd/Plots/set_10_lnd.py create mode 100644 diagnostics/diagnostics/lnd/Plots/set_10_seas_lnd.py create mode 100644 diagnostics/diagnostics/lnd/Plots/set_11_lnd.py create mode 100644 diagnostics/diagnostics/lnd/Plots/set_11_seas_lnd.py create mode 100644 diagnostics/diagnostics/lnd/Plots/set_12_lnd.py create mode 100755 lnd_diag/inputFiles/set10_clm.txt create mode 100755 lnd_diag/inputFiles/set11_clm.txt create mode 100755 lnd_diag/model-obs/set_10.ncl create mode 100755 lnd_diag/model-obs/set_11.ncl create mode 100755 lnd_diag/model-obs/set_12.ncl create mode 100755 lnd_diag/model1-model2/set_10.ncl create mode 100755 lnd_diag/model1-model2/set_10_seas.ncl create mode 100755 lnd_diag/model1-model2/set_11.ncl create mode 100755 lnd_diag/model1-model2/set_11_seas.ncl create mode 100755 lnd_diag/model1-model2/set_12.ncl diff --git 
a/diagnostics/diagnostics/lnd/Config/config_diags_lnd.xml b/diagnostics/diagnostics/lnd/Config/config_diags_lnd.xml index c92de98a..9164b457 100644 --- a/diagnostics/diagnostics/lnd/Config/config_diags_lnd.xml +++ b/diagnostics/diagnostics/lnd/Config/config_diags_lnd.xml @@ -811,6 +811,30 @@ Applies to both model and control cases." desc="Plot Set 9: VALIDATION DIAGNOSTICS (ONLY FOR MODEL-MODEL)" > + + + + + + diff --git a/diagnostics/diagnostics/lnd/Plots/__init__.py b/diagnostics/diagnostics/lnd/Plots/__init__.py index 71bd5c82..d47211cf 100644 --- a/diagnostics/diagnostics/lnd/Plots/__init__.py +++ b/diagnostics/diagnostics/lnd/Plots/__init__.py @@ -1,2 +1,2 @@ -from . import set_1DiffPlot_lnd, set_1_lnd, set_2_seas_lnd, set_2_lnd, set_3_lnd, set_4_lnd, set_5_lnd, set_6_lnd, set_7_lnd, set_8_ann_cycle_lnd, set_8_ann_cycle, set_8_contour, set_8_DJF_JJA_contour, set_8_trends, set_8_zonal_lnd, set_8_zonal, set_9_lnd, lnd_diags_plot_bc, lnd_diags_plot_factory +from . import set_1DiffPlot_lnd, set_1_lnd, set_2_seas_lnd, set_2_lnd, set_3_lnd, set_4_lnd, set_5_lnd, set_6_lnd, set_7_lnd, set_8_ann_cycle_lnd, set_8_ann_cycle, set_8_contour, set_8_DJF_JJA_contour, set_8_trends, set_8_zonal_lnd, set_8_zonal, set_9_lnd, set_10_lnd, set_10_seas_lnd, set_11_lnd, set_11_seas_lnd, set_12_lnd, lnd_diags_plot_bc, lnd_diags_plot_factory diff --git a/diagnostics/diagnostics/lnd/Plots/lnd_diags_plot_factory.py b/diagnostics/diagnostics/lnd/Plots/lnd_diags_plot_factory.py index 8b9898bd..55dbbdaf 100755 --- a/diagnostics/diagnostics/lnd/Plots/lnd_diags_plot_factory.py +++ b/diagnostics/diagnostics/lnd/Plots/lnd_diags_plot_factory.py @@ -22,6 +22,11 @@ import set_8_zonal_lnd import set_8_zonal import set_9_lnd +import set_10_lnd +import set_10_seas_lnd +import set_11_lnd +import set_11_seas_lnd +import set_12_lnd def LandDiagnosticPlotFactory(plot_type,env): """Create and return an object of the requested type. 
@@ -57,7 +62,12 @@ def LandDiagnosticPlotFactory(plot_type,env): plot_set['set_8_zonal_lnd'] = set_8_zonal_lnd.set_8_zonal_lnd(env) elif plot_type == "set_9": plot_set['set_9'] = set_9_lnd.set_9(env) - + elif plot_type == "set_10": + plot_set['set_10'] = set_10_lnd.set_10(env) + elif plot_type == "set_11": + plot_set['set_11'] = set_11_lnd.set_11(env) + elif plot_type == "set_12": + plot_set['set_12'] = set_12_lnd.set_12(env) else: raise UnknownPlotType("WARNING: Unknown plot type requested: '{0}'".format(plot_type)) diff --git a/diagnostics/diagnostics/lnd/Plots/set_10_lnd.py b/diagnostics/diagnostics/lnd/Plots/set_10_lnd.py new file mode 100644 index 00000000..90b1c8bf --- /dev/null +++ b/diagnostics/diagnostics/lnd/Plots/set_10_lnd.py @@ -0,0 +1,41 @@ +from __future__ import print_function + +import sys + +if sys.hexversion < 0x02070000: + print(70 * "*") + print("ERROR: {0} requires python >= 2.7.x. ".format(sys.argv[0])) + print("It appears that you are running python {0}".format( + ".".join(str(x) for x in sys.version_info[0:3]))) + print(70 * "*") + sys.exit(1) + +import traceback +import os +import shutil +import jinja2 + +# import the helper utility module +from cesm_utils import cesmEnvLib +from diag_utils import diagUtilsLib + +# import the plot baseclass module +from lnd_diags_plot_bc import LandDiagnosticPlot + +class set_10(LandDiagnosticPlot): + """Set 10 Plots + """ + + def __init__(self, env): + super(set_10, self).__init__() + + # Set plot class description variables + self._name = 'Set 10 Plots' + self._shortname = 'set_10' + self._template_file = 'set_10.tmpl' + self.ncl_scripts = ['set_10.ncl'] + self.plot_env = env.copy() + + def check_prerequisites(self, env): + # Set plot specific variables + preq = 'No variables to set' diff --git a/diagnostics/diagnostics/lnd/Plots/set_10_seas_lnd.py b/diagnostics/diagnostics/lnd/Plots/set_10_seas_lnd.py new file mode 100644 index 00000000..d5bba344 --- /dev/null +++ 
b/diagnostics/diagnostics/lnd/Plots/set_10_seas_lnd.py @@ -0,0 +1,41 @@ +from __future__ import print_function + +import sys + +if sys.hexversion < 0x02070000: + print(70 * "*") + print("ERROR: {0} requires python >= 2.7.x. ".format(sys.argv[0])) + print("It appears that you are running python {0}".format( + ".".join(str(x) for x in sys.version_info[0:3]))) + print(70 * "*") + sys.exit(1) + +import traceback +import os +import shutil +import jinja2 + +# import the helper utility module +from cesm_utils import cesmEnvLib +from diag_utils import diagUtilsLib + +# import the plot baseclass module +from lnd_diags_plot_bc import LandDiagnosticPlot + +class set_2_seas(LandDiagnosticPlot): + """Set set_2_seas Plots + """ + + def __init__(self, env): + super(set_2_seas, self).__init__() + + # Set plot class description variables + self._name = 'Set set_2_seas Plots' + self._shortname = 'set_2_seas' + self._template_file = 'set_2_seas.tmpl' + self.ncl_scripts = ['set_2_seas.ncl'] + self.plot_env = env.copy() + + def check_prerequisites(self, env): + # Set plot specific variables + preq = 'No variables to set' diff --git a/diagnostics/diagnostics/lnd/Plots/set_11_lnd.py b/diagnostics/diagnostics/lnd/Plots/set_11_lnd.py new file mode 100644 index 00000000..b15869a9 --- /dev/null +++ b/diagnostics/diagnostics/lnd/Plots/set_11_lnd.py @@ -0,0 +1,41 @@ +from __future__ import print_function + +import sys + +if sys.hexversion < 0x02070000: + print(70 * "*") + print("ERROR: {0} requires python >= 2.7.x. 
".format(sys.argv[0])) + print("It appears that you are running python {0}".format( + ".".join(str(x) for x in sys.version_info[0:3]))) + print(70 * "*") + sys.exit(1) + +import traceback +import os +import shutil +import jinja2 + +# import the helper utility module +from cesm_utils import cesmEnvLib +from diag_utils import diagUtilsLib + +# import the plot baseclass module +from lnd_diags_plot_bc import LandDiagnosticPlot + +class set_11(LandDiagnosticPlot): + """Set 11 Plots + """ + + def __init__(self, env): + super(set_11, self).__init__() + + # Set plot class description variables + self._name = 'Set 11 Plots' + self._shortname = 'set_11' + self._template_file = 'set_11.tmpl' + self.ncl_scripts = ['set_11.ncl'] + self.plot_env = env.copy() + + def check_prerequisites(self, env): + # Set plot specific variables + preq = 'No variables to set' diff --git a/diagnostics/diagnostics/lnd/Plots/set_11_seas_lnd.py b/diagnostics/diagnostics/lnd/Plots/set_11_seas_lnd.py new file mode 100644 index 00000000..b39edc83 --- /dev/null +++ b/diagnostics/diagnostics/lnd/Plots/set_11_seas_lnd.py @@ -0,0 +1,41 @@ +from __future__ import print_function + +import sys + +if sys.hexversion < 0x02070000: + print(70 * "*") + print("ERROR: {0} requires python >= 2.7.x. 
".format(sys.argv[0])) + print("It appears that you are running python {0}".format( + ".".join(str(x) for x in sys.version_info[0:3]))) + print(70 * "*") + sys.exit(1) + +import traceback +import os +import shutil +import jinja2 + +# import the helper utility module +from cesm_utils import cesmEnvLib +from diag_utils import diagUtilsLib + +# import the plot baseclass module +from lnd_diags_plot_bc import LandDiagnosticPlot + +class set_11_seas(LandDiagnosticPlot): + """Set set_11_seas Plots + """ + + def __init__(self, env): + super(set_11_seas, self).__init__() + + # Set plot class description variables + self._name = 'Set set_11_seas Plots' + self._shortname = 'set_11_seas' + self._template_file = 'set_11_seas.tmpl' + self.ncl_scripts = ['set_11_seas.ncl'] + self.plot_env = env.copy() + + def check_prerequisites(self, env): + # Set plot specific variables + preq = 'No variables to set' diff --git a/diagnostics/diagnostics/lnd/Plots/set_12_lnd.py b/diagnostics/diagnostics/lnd/Plots/set_12_lnd.py new file mode 100644 index 00000000..243d4fa9 --- /dev/null +++ b/diagnostics/diagnostics/lnd/Plots/set_12_lnd.py @@ -0,0 +1,41 @@ +from __future__ import print_function + +import sys + +if sys.hexversion < 0x02070000: + print(70 * "*") + print("ERROR: {0} requires python >= 2.7.x. 
".format(sys.argv[0])) + print("It appears that you are running python {0}".format( + ".".join(str(x) for x in sys.version_info[0:3]))) + print(70 * "*") + sys.exit(1) + +import traceback +import os +import shutil +import jinja2 + +# import the helper utility module +from cesm_utils import cesmEnvLib +from diag_utils import diagUtilsLib + +# import the plot baseclass module +from lnd_diags_plot_bc import LandDiagnosticPlot + +class set_12(LandDiagnosticPlot): + """Set 12 Plots + """ + + def __init__(self, env): + super(set_12, self).__init__() + + # Set plot class description variables + self._name = 'Set 12 Plots' + self._shortname = 'set_12' + self._template_file = 'set_12.tmpl' + self.ncl_scripts = ['set_12.ncl'] + self.plot_env = env.copy() + + def check_prerequisites(self, env): + # Set plot specific variables + preq = 'No variables to set' diff --git a/diagnostics/diagnostics/lnd/model_vs_model.py b/diagnostics/diagnostics/lnd/model_vs_model.py index 7d10ba56..14d322ea 100644 --- a/diagnostics/diagnostics/lnd/model_vs_model.py +++ b/diagnostics/diagnostics/lnd/model_vs_model.py @@ -168,7 +168,7 @@ def run_diagnostics(self, env, scomm): if scomm.is_manager(): # Create web dirs and move images/tables to that web dir - for n in ('1', '2', '3', '4', '5', '6', '7', '8', '9'): + for n in ('1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'): web_dir = env['WKDIR'] set_dir = web_dir + '/set' + n # Create the plot set web directory diff --git a/diagnostics/diagnostics/lnd/model_vs_obs.py b/diagnostics/diagnostics/lnd/model_vs_obs.py index 15839292..5fea7207 100644 --- a/diagnostics/diagnostics/lnd/model_vs_obs.py +++ b/diagnostics/diagnostics/lnd/model_vs_obs.py @@ -166,7 +166,7 @@ def run_diagnostics(self, env, scomm): if scomm.is_manager(): # Create web dirs and move images/tables to that web dir - for n in ('1', '2', '3', '4', '5', '6', '7', '8', '9'): + for n in ('1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'): web_dir = env['WKDIR'] 
set_dir = web_dir + '/set' + n # Create the plot set web directory diff --git a/examples/CESM2.0-PI-Control/env_diags_lnd.xml b/examples/CESM2.0-PI-Control/env_diags_lnd.xml index 90297d38..fa98cb4b 100644 --- a/examples/CESM2.0-PI-Control/env_diags_lnd.xml +++ b/examples/CESM2.0-PI-Control/env_diags_lnd.xml @@ -561,9 +561,20 @@ - + + + - + + + + + + + + + + diff --git a/lnd_diag/inputFiles/set10_clm.txt b/lnd_diag/inputFiles/set10_clm.txt new file mode 100755 index 00000000..537f106c --- /dev/null +++ b/lnd_diag/inputFiles/set10_clm.txt @@ -0,0 +1,34 @@ +globalMeanNat1 TSA +globalMeanDay1 PREC +globalMeanNat0 ASA +globalMeanNat0 RNET +globalMeanNat1 LHEAT +globalMeanNat0 SNOWDP +globalMeanNat0 FSH +globalMeanNat0 TV +globalMeanNat0 TG +globalMeanNat0 FSA +globalMeanNat0 FSR +globalMeanNat0 FIRA +globalMeanNat0 FIRE +globalMeanNat0 FGR +globalMeanNat0 FSM +globalMeanNat0 H2OSNO_ICE +globalMeanDay0 QSNOMELT_ICE +globalMeanDay0 QSNOMELT +globalMeanDay0 QSOIL +globalMeanDay0 QSOIL_ICE +globalMeanNat0 FSNO +globalMeanDay0 RAIN +globalMeanDay0 SNOW +globalMeanDay0 RAIN_REPARTITIONED +globalMeanDay0 SNOW_REPARTITIONED +globalMeanNat0 WIND +globalMeanNat0 FLDS +globalMeanNat0 FSDS +globalMeanNat0 Q2M +globalMeanNat0 RH2M +globalMeanNat0 TSOI +globalMeanNat0 TWS +globalMeanNat0 U10 +globalMeanNat0 PBOT diff --git a/lnd_diag/inputFiles/set11_clm.txt b/lnd_diag/inputFiles/set11_clm.txt new file mode 100755 index 00000000..537f106c --- /dev/null +++ b/lnd_diag/inputFiles/set11_clm.txt @@ -0,0 +1,34 @@ +globalMeanNat1 TSA +globalMeanDay1 PREC +globalMeanNat0 ASA +globalMeanNat0 RNET +globalMeanNat1 LHEAT +globalMeanNat0 SNOWDP +globalMeanNat0 FSH +globalMeanNat0 TV +globalMeanNat0 TG +globalMeanNat0 FSA +globalMeanNat0 FSR +globalMeanNat0 FIRA +globalMeanNat0 FIRE +globalMeanNat0 FGR +globalMeanNat0 FSM +globalMeanNat0 H2OSNO_ICE +globalMeanDay0 QSNOMELT_ICE +globalMeanDay0 QSNOMELT +globalMeanDay0 QSOIL +globalMeanDay0 QSOIL_ICE +globalMeanNat0 FSNO +globalMeanDay0 RAIN 
+globalMeanDay0 SNOW +globalMeanDay0 RAIN_REPARTITIONED +globalMeanDay0 SNOW_REPARTITIONED +globalMeanNat0 WIND +globalMeanNat0 FLDS +globalMeanNat0 FSDS +globalMeanNat0 Q2M +globalMeanNat0 RH2M +globalMeanNat0 TSOI +globalMeanNat0 TWS +globalMeanNat0 U10 +globalMeanNat0 PBOT diff --git a/lnd_diag/inputFiles/set2_clm.txt b/lnd_diag/inputFiles/set2_clm.txt index 5730d1f0..dbb5fbd5 100755 --- a/lnd_diag/inputFiles/set2_clm.txt +++ b/lnd_diag/inputFiles/set2_clm.txt @@ -104,4 +104,4 @@ globalMeanNat0 PCT_BSOIL_PFT globalMeanNat0 PCT_TREE_PFT globalMeanNat0 PCT_GRASS_PFT globalMeanNat0 PCT_SHRUB_PFT -globalMeanNat0 PCT_CROP_PFT +GLobalMeanNat0 PCT_CROP_PFT diff --git a/lnd_diag/inputFiles/variable_master4.3.ncl b/lnd_diag/inputFiles/variable_master4.3.ncl index f2fad744..2ad2ab7e 100644 --- a/lnd_diag/inputFiles/variable_master4.3.ncl +++ b/lnd_diag/inputFiles/variable_master4.3.ncl @@ -5949,6 +5949,36 @@ if (varName .eq. "LITTERC_HR") then return(info) end if + if (varName .eq. "H2OSNO_ICE") then + info@flux=False + info@longName="total snow water equiv (SNOWICE + SNOWLIQ) over glacier units" + info@nativeUnits = "mm" + ; -- global Total + info@globalTotal__units = "NA" + info@globalTotal__SF = -999. + info@globalTotal_Annual__units = "NA" + info@globalTotal_Annual__SF = -999. + ; -- global Mean + info@globalMean__units = "mm" + info@globalMean__SF = 1.0 + info@globalMean_Annual__units = "NA" + info@globalMean_Annual__SF = -999. + info@globalMean_Daily__units = "NA" + info@globalMean_Daily__SF = -999. + ; -- Origin + info@model = "CN" + info@derivedVariable = False + info@clampVariable = False + ; -- contour intervals + info@cn_Explicit = True + info@cn_Min = -999 + info@cn_Max = -999 + info@cn_Levels = (/0.,25.,50.,75.,100.,125.,150.,175.,200./) + info@cn_LevelsDiff = (/-50.,-40.,-30.,-20.,-10.,0.,10.,20.,30.,40.,50./) + info@cn_LevelsDiffObs = (/-50.,0.,50.,100.,150.,200.,250.,300.,350.,400.,450./) + return(info) + end if + if (varName .eq. 
"H2OSOI") then info@flux= True info@longName="volumetric soil water" @@ -6903,6 +6933,35 @@ if (varName .eq. "LITTERC_HR") then return(info) end if + if (varName .eq. "QSNOMELT_ICE") then + info@flux=True + info@longName="snow melt over glacier units" + info@nativeUnits = "mm/s" + ; -- global Total + info@globalTotal__units = "NA" + info@globalTotal__SF = -999. + info@globalTotal_Annual__units = "mm/y" + info@globalTotal_Annual__SF = secondsInYear + ; -- global Mean + info@globalMean__units = "mm/s" + info@globalMean__SF = 1.0 + info@globalMean_Annual__units = "mm/y" + info@globalMean_Annual__SF = secondsInYear + info@globalMean_Daily__units = "mm/d" + info@globalMean_Daily__SF = secondsInDay + ; -- Origin + info@model = "CN" + info@derivedVariable = False + info@clampVariable = False + ; -- contour intervals + info@cn_Explicit = True + info@cn_Min = -999 + info@cn_Max = -999 + info@cn_Levels = (/0.,.5,1.,1.5,2.,2.5,3.,3.5,4./) + info@cn_LevelsDiff = (/-5.,-2.5,-1.,-.5,-.1,0.,.1,.5,1.,2.5,5./) + return(info) + end if + if (varName .eq. "SOIL_EVAPORATION") then info@flux=True info@longName="Soil Evaporation" @@ -6961,6 +7020,35 @@ if (varName .eq. "LITTERC_HR") then return(info) end if + if (varName .eq. "QSOIL_ICE") then + info@flux=True + info@longName="ground evaporation over glacier units" + info@nativeUnits = "mm/s" + ; -- global Total + info@globalTotal__units = "NA" + info@globalTotal__SF = -999. 
+ info@globalTotal_Annual__units = "mm/y" + info@globalTotal_Annual__SF = secondsInYear + ; -- global Mean + info@globalMean__units = "mm/s" + info@globalMean__SF = 1.0 + info@globalMean_Annual__units = "mm/y" + info@globalMean_Annual__SF = secondsInYear + info@globalMean_Daily__units = "mm/d" + info@globalMean_Daily__SF = secondsInDay + ; -- Origin + info@model = "CN" + info@derivedVariable = False + info@clampVariable = False + ; -- contour intervals + info@cn_Explicit = True + info@cn_Min = -999 + info@cn_Max = -999 + info@cn_Levels = (/0.,.5,1.,1.5,2.,2.5,3.,3.5,4./) + info@cn_LevelsDiff = (/-5.,-2.5,-1.,-.5,-.1,0.,.1,.5,1.,2.5,5./) + return(info) + end if + if (varName .eq. "CANOPY_EVAPORATION") then info@flux=True info@longName="Canopy Evaporation" @@ -7340,6 +7428,64 @@ if (varName .eq. "LITTERC_HR") then return(info) end if + + if (varName .eq. "SNOW_REPARTITIONED") then + info@flux=True + info@longName="atmospheric snow after repartitioning" + info@nativeUnits = "mm/s" + ; -- global Total + info@globalTotal__units = "NA" + info@globalTotal__SF = -999. + info@globalTotal_Annual__units = "NA" + info@globalTotal_Annual__SF = -999. + ; -- global Mean + info@globalMean__units = "mm/s" + info@globalMean__SF = 1.0 + info@globalMean_Annual__units = "mm/y" + info@globalMean_Annual__SF = secondsInYear + info@globalMean_Daily__units = "mm/d" + info@globalMean_Daily__SF = secondsInDay + ; -- Origin + info@model = "CN" + info@derivedVariable = False + info@clampVariable = False + ; -- contour intervals + info@cn_Explicit = True + info@cn_Min = -999 + info@cn_Max = -999 + info@cn_Levels = (/0.,.5,1.,1.5,2.,2.5,3.,3.5,4./) + info@cn_LevelsDiff = (/-1.25,-1.,-0.75,-0.5,-0.25,0.,0.25,0.5,0.75,1.,1.25/) + return(info) + end if + + if (varName .eq. "RAIN_REPARTITIONED") then + info@flux=True + info@longName="atmospheric rain after repartitioning" + info@nativeUnits = "mm/s" + ; -- global Total + info@globalTotal__units = "NA" + info@globalTotal__SF = -999. 
+ info@globalTotal_Annual__units = "NA" + info@globalTotal_Annual__SF = -999. + ; -- global Mean + info@globalMean__units = "mm/s" + info@globalMean__SF = 1.0 + info@globalMean_Annual__units = "mm/y" + info@globalMean_Annual__SF = secondsInYear + info@globalMean_Daily__units = "mm/d" + info@globalMean_Daily__SF = secondsInDay + ; -- Origin + info@model = "CN" + info@derivedVariable = False + info@clampVariable = False + ; -- contour intervals + info@cn_Explicit = True + info@cn_Min = -999 + info@cn_Max = -999 + info@cn_Levels = (/0.,.5,1.,1.5,2.,2.5,3.,3.5,4./) + info@cn_LevelsDiff = (/-1.25,-1.,-0.75,-0.5,-0.25,0.,0.25,0.5,0.75,1.,1.25/) + return(info) + end if if (varName .eq. "WIND") then info@flux=True info@longName="atmospheric wind velocity magnitude" diff --git a/lnd_diag/model-obs/set_10.ncl b/lnd_diag/model-obs/set_10.ncl new file mode 100755 index 00000000..cf0e35f0 --- /dev/null +++ b/lnd_diag/model-obs/set_10.ncl @@ -0,0 +1,917 @@ +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$DIAG_SHARED/$VAR_MASTER" +load "$DIAG_SHARED/lnd_func.ncl" +load "$DIAG_SHARED/find_var_with_alts.ncl" +;************************************************ +; NOTE: adapted from plot_cons.ncl (KOleson code) +; Revision Author: Nan Rosenbloom Spring 2005 +; Revision Author 2: Jan Lenaerts Spring 2017 +;************************************************ +begin + print ("=========================================") + print ("Starting: set_10.ncl") + print ("Start Time: "+systemfunc("date") ) + print ("=========================================") + wkdir = getenv("WKDIR") + ptmpdir = getenv("PTMPDIR_1") + obsdata = getenv("OBS_DATA") + obs_res = getenv("OBS_RES") + paleo = getenv("paleo") + useCommon1 = getenv("UseCommonName_1") + raster = getenv("raster") + land_mask = getenv("land_mask1") + user_defined = getenv("expContours") + cn = getenv("CN") + 
colormap = getenv("colormap") + projection=getenv("projection") + cn = getenv("CN") + plot_type = getenv("PLOTTYPE") + + fland_mask = stringtofloat(land_mask) + +; the set lists contains two columns of information. This comes into +; NCL as one long string that we need to parse out. + tmp = stringtocharacter(asciiread(wkdir+"/master_set10.txt",-1,"string")) + nvar = dimsizes(tmp(:,0)) + scale_type = new(nvar,"string") + vars = new(nvar,"string") + c13Flag = new(nvar,"string") + dynContour = new(nvar,"string") + do i=0,nvar-1 + scale_type(i) = charactertostring(tmp(i,0:12)) + dynContour(i) = charactertostring(tmp(i,13)) + vars(i) = charactertostring(tmp(i,16:)) + c13Flag(i) = charactertostring(tmp(i,16:19)) ; derived variable prefix + end do + delete(tmp) + +;************************************************ +; Cut off top and bottom percent to tighten contour intervals. 12/06 nanr +;************************************************ + pCut = 5 + pLow = 0.05 + pHigh = 0.95 + + res = True + res = set10Res(res) + +;************************************************* +; get case names and create filenames to read in +;************************************************* + nyrs = stringtointeger(getenv("clim_num_yrs_1")) + fyr = stringtointeger(getenv("clim_first_yr_1")) +;; nyrs = getenv("clim_num_yrs_1") +;; fyr = getenv("clim_first_yr_1") + sig_lvl = getenv("sig_lvl") + lyr = (fyr + nyrs)-1 ; for plotting + zoom = stringtointeger(getenv("reg_contour")); # (1 = SUB, 0 = GLOBAL) +;; zoom = getenv("reg_contour"); # (1 = SUB, 0 = GLOBAL) + if (zoom.eq.1)then + min_lat = stringtofloat(getenv("min_lat")) + max_lat = stringtofloat(getenv("min_lat")) + min_lon = stringtofloat(getenv("min_lon")) + max_lon = stringtofloat(getenv("min_lon")) + +;; min_lat = getenv("min_lat") +;; max_lat = getenv("min_lat") +;; min_lon = getenv("min_lon") +;; max_lon = getenv("min_lon") + end if + seasons = (/"DJF","JJA","MAM","SON","ANN"/) +;************************************************* +; common 
plot resources +;************************************************* + pres = True ; panel only resources + pres@gsnPanelYWhiteSpacePercent = 2 + pres@gsnMaximize = True ; make as large as possible + pres@gsnPaperOrientation = "portrait" + +; read in case strings + cases = new(2,string) + if (useCommon1 .eq. "True") then + name1 = "commonName_1" + else + name1 = "caseid_1" + end if + name11 = "prefix_1" + cases(0) = getenv(name1) + cases(1) = "Obs" + prefix = getenv(name11) + +; *************************** +; loop over seasons +; *************************** + + do n = 0,dimsizes(seasons)-1 + print("Processing season " + seasons(n)) +; pres@txString = seasons(n) + +; open model1 files + in1 = addfile(ptmpdir +"/"+prefix+"_"+seasons(n)+"_climo.nc","r") + +; open observation files + ptr_racmo = addfile(obsdata+"/RACMO2_GR_"+seasons(n)+"_climo.nc","r") ; leaf area index + +; extract years averaged for plotting + if (isatt(in1,"yrs_averaged")) then + yrs_ave1 = in1@yrs_averaged + else + yrs_ave1 = fyr+"-"+lyr + end if + + nlev = 0 + if (isfilevar(in1,"levsoi")) then + levsoi = in1->levsoi + nlev = getfilevardimsizes(in1,"levsoi") + end if + if (isfilevar(in1,"levgrnd")) then + levsoi = in1->levgrnd + nlev = getfilevardimsizes(in1,"levgrnd") + end if + if (nlev .eq. 0) + print("FATAL ERROR: nlev = 0") + exit + end if +; truncate soil layers if needed + if (nlev .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix(0)+" nlev: " + nlev +"]") + nlev = 10 + end if + + if (isfilevar(in1,"TLAKE") .and. isfilevar(in1,"levlak") ) then + levlak1 = in1->levlak + nlevlak1 = getfilevardimsizes(in1,"levlak") + end if + lat1 = in1->lat + nlat1 = dimsizes(lat1) + lon1 = in1->lon + nlon1 = dimsizes(lon1) + +; extract landfrac if paleo file + print(paleo) + if (paleo .eq. "True") then + landfrac = in1->landfrac + printVarSummary(landfrac) + oro = new((/nlat1,nlon1/),integer) + oro = 1 + oro@_FillValue = 0 + oro = where(landfrac .gt. 
0.0, 1,0) +; fname = wkdir+prefix +; fname = wkdir+"basename" +; fname = wkdir+cases(0) + fname = wkdir+getenv(name1) + paleo_outline(oro,lat1,lon1,1., fname) + res = True + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; truncate soil layers if needed + if (nlev .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix+" nlev: " + nlev +"]") + nlev = 10 + end if + +; loop over variables + + snowFlag = 0 + + do i=0,dimsizes(vars)-1 + if (paleo .ne. "True") then + if (snowFlag .eq. 1) then ; run SNOWDP twice to see both obs datasets + i = i-1 + end if + if (vars(i) .eq. "SNOWDP") then + snowFlag = snowFlag + 1 + end if + end if + print("Processing variable " + vars(i)) + +; read in case 1 variable + info = var_init(vars(i)) + filevar = find_var_with_alts(in1, vars(i), info) +; now filevar is the name of the variable in the file (either +; vars(i) or one of its alternate names); if this variable was +; not found in the file, then filevar will be "" + + plot_check = True + if (isvar("x1")) then + delete(x1) + end if + + if (filevar .ne. "")then + if (c13Flag(i) .eq. "C13_") then + x1 = derive_del13C(vars(i),in1,scale_type(i),0,2) + else + x1 = in1->$filevar$ + end if + plot_check = True + delete(filevar) + else + if (info@derivedVariable .eq. True) then + x1 = derive_land( vars(i),in1) + else + print("variable "+ vars(i)+ " is not a defined variable.") + plot_check = False + continue + end if + end if +; catch NaNs + x1 = catchNaN(x1) + + if (all(ismissing(x1)))then + print("variable "+ vars(i)+ " contains all missing values.") + plot_check = False + delete(x1) + end if + + if (plot_check .eq. False) then + continue + else +; read in observations (if present) + obsFlag = 0 ; (0 = on; 1 = off) + if (isvar("x2") ) then + delete(x2) + end if + if (paleo .ne. "True") then + if (vars(i) .eq. "ASA" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2*100. 
; fraction to % + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "QSOIL" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "U10" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FGR" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FLDS" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FIRE" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2 * -1. ; defined negative in RACMO2.3 + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "RAIN" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "PBOT" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "Q2M" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "QICE_FRZ" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "RH2M" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2 * 100. ; fraction to % + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "QICE_MELT" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FSH" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2 * -1. ; defined neg. 
in CESM + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "QICE" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "SNOW" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "QSNOMELT" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FSDS" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FSR" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2 * -1. ; defined as negative in RACMO2.3 + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "TSA" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2 - 273.16 ; K to C + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.3" + obsFlag = 1; + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if ; paleo + + if (paleo .eq. "True") then + obsFlag = 0 + printVarSummary(obsFlag) + end if + + if (obsFlag .eq. 1) then + nlat2 = dimsizes(lat2) + nlon2 = dimsizes(lon2) + end if + +; get info for @ varb and scale + info = var_init(vars(i)) + x1 = scale_var(x1,scale_type(i),info) + + if (x1@units .eq. "K") then + x1 = x1 - 273.15 ; convert K to C + useUnits = "C" + else + useUnits = x1@units + end if + + print("processing variable: " + vars(i) ) + +; process observations if present + if (obsFlag .eq. 1) ; (0 = on; 1 = off) + ; flip longitudes to -180 to 180 if needed + if (min(x2&lon) .ge. 0 .and. min(x1&lon) .lt. 0) then + x2 = lonFlip(x2) + end if + if (min(x1&lon) .ge. 0 .and. min(x2&lon) .lt. 
0) then + x1 = lonFlip(x1) + end if + + ; interpolate data to new grid if necessary + if (nlat1 .ne. nlat2) then + if (nlat1 .gt. nlat2) then ; interpolate x1 to x2 grid + x1_interp = linint2_Wrap(lon1,lat1,x1,True,lon2,lat2,0) + x2_interp = x2 + else + x2_interp = linint2_Wrap(lon2,lat2,x2,True,lon1,lat1,0) + x1_interp = x1 + end if + else + x1_interp = x1 + x2_interp = x2 + end if +; different plots for observations + plot = new(3,graphic) + else + x1_interp = x1 + plot = new(1,graphic) + end if ; end observations + +; calculate size + rank = dimsizes(dimsizes(x1)) +;******************************************************************* +; 3D VARIABLES (time, lat, lon) +;******************************************************************* + if (rank.eq.3)then +; open postscript file and choose colormap + if (isvar("wks")) then + delete(wks) + end if + if (vars(i) .eq. "SNOWDP") then + if (snowFlag .eq. 1) then + wks = gsn_open_wks(plot_type,wkdir + "set10_" + seasons(n) + "_" + vars(i) + "_FOSTERDAVY") + else + wks = gsn_open_wks(plot_type,wkdir + "set10_" + seasons(n) + "_" + vars(i) + "_CMC") + end if + else + wks = gsn_open_wks(plot_type,wkdir + "set10_" + seasons(n) + "_" + vars(i)) + end if + if (isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap. 10 colors for case maps. + gsn_define_colormap(wks,cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) + end if +; calculate time average + if (obsFlag .eq. 1) then ; (1 = obs; 0 = no obs) + x2_interp = mask(x2_interp, (x1_interp(0,:,:) .eq. 
x1_interp@_FillValue), False) ; remove non-land points + var_avg_2 = x2_interp + end if + var_avg_1 = dim_avg_n_Wrap(x1_interp,0) ; time average + min1 = min(var_avg_1) + max1 = max(var_avg_1) +;************************************************************************** +; Set contour Levels: Dynamic vs pre-defined contours +;************************************************************************** +; dynContour: 1=dynamic; 0=explicit + if (info@cn_Explicit .eq. True .and. dynContour(i) .eq. 0 .and. user_defined .eq. 1) then + expFlag = True + else + expFlag = False + end if + res = True + res = set10Res(res) + if (colormap.eq.1) then + res@cnFillPalette = cmap1(13:240,:) + end if + if (min1.eq.max1) then + delete(res@cnLabelBarEndStyle) + end if + + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if +; plot Model (no obs) + if (obsFlag .eq. 0) then ; (1 = obs; 0 = no obs) +; set case 1 titles + if (expFlag .eq. True) then + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_Levels ; contour levels + else + if (c13Flag(i) .eq. "C13_") then + useMin1 = -40 ; hardcode boundaries to expectations + useMax1 = 0 ; hardcode boundaries to expectations + else + x = ndtooned(var_avg_1) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin1 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax1 = x(iHigh) + delete(x) + end if + maxLevel = 14 + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + + res@tiMainString = cases(0) + " (yrs " + yrs_ave1 + ")" + res@tiMainOffsetYF = 0.0 +; if (expFlag .eq. 
True) then +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; " Max = "+sprintf("%6.4g",max1) +; else +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; " Max = "+sprintf("%6.4g",max1)+" (+/-"+pCut+"%)" +; end if + res@gsnLeftString = vars(i) ; could also use info@longName if we want + res@gsnCenterString = seasons(n) + res@gsnRightString = useUnits ; assume same units for x1 and x2 + res@lbOrientation = "horizontal" + res@pmLabelBarWidthF = 0.4 + res@pmLabelBarHeightF = 0.085 + if (projection.eq.1) then + res@pmLabelBarOrthogonalPosF= 0.085 + else + res@pmLabelBarOrthogonalPosF= 0.05 + end if + res@lbLabelFontHeightF = 0.014 + res@gsnStringFontHeightF = 0.014 + res@tiMainFontHeightF = 0.018 + +; case 1 plot + plot = gsn_csm_contour_map(wks,var_avg_1,res) + + pres@txFontHeightF = res@tiMainFontHeightF + .003 + gsn_panel(wks,plot,(/1,1/),pres) ; draw panel plot + + delete(res) + delete(min1) + delete(max1) +; OBSERVATION PLOTS ----------------------------------------------------------------------------------------- + else ; model + observations +; ----------------------------------------------------------------------------------------------------------- +; panel 1 plot: case 1 plot + res = True + res = set10Res(res) + + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + min1 = doubletofloat(min(var_avg_1)) + max1 = doubletofloat(max(var_avg_1)) + + if (expFlag .eq. True) then + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_Levels ; contour levels + else + x = ndtooned(var_avg_1) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + + qsort(x) + iLow = floattointeger( pLow*nGood) + useMin1 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax1 = x(iHigh) + delete(x) + + maxLevel = 9 +;; added 2016/01/29 + if (vars(i) .eq. "SNOWDP") then + useMin12 = 0. + useMax12 = 1. + end if + if (vars(i) .eq. "H2OSNO") then + useMin12 = 0. 
+ useMax12 = 500. + end if + + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMax1) + delete(useMin1) + end if + + res@gsnDraw = False ; for panel plots + res@lbLabelStride = 1 + res@lbLabelFontHeightF = 0.018 + res@gsnStringFontHeightF = 0.018 + res@tiMainFontHeightF = 0.022 + res@pmLabelBarHeightF = 0.4 + res@pmLabelBarWidthF = 0.085 + res@pmLabelBarOrthogonalPosF= 0.05 + + res@tiMainString = cases(0) + " (yrs " + yrs_ave1 + ")" + res@tiMainOffsetYF = -0.01 + res@gsnLeftString = "" ; info@longName ; could also use info@longName if we want + res@gsnCenterString = "" + res@gsnRightString = "" ; useUnits ; assume same units for x1 and x2 + res@lbOrientation = "Vertical" + delete(min1) + delete(max1) + + plot(0) = gsn_csm_contour_map(wks,var_avg_1,res) +; save settings to plot obs + +; panel 2 plot: case 2 plot + + check = isdouble(var_avg_2) + if (check .eq. True) then + min1 = doubletofloat(min(var_avg_2)) + max1 = doubletofloat(max(var_avg_2)) + else + min1 = min(var_avg_2) + max1 = max(var_avg_2) + end if + delete(check) + res@tiMainString = cases(1) +; if (expFlag .eq. True) then +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; "~C~Max = "+sprintf("%6.4g",max1) +; else +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; "~C~Max = "+sprintf("%6.4g",max1)+" (+/-"+pCut+"%)" +; end if + delete(min1) + delete(max1) + + plot(1) = gsn_csm_contour_map(wks,var_avg_2,res) + delete(res) + +; panel 3 difference plot + + if ( isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag12.rgb") ; read in colormap. 12 colors for diff maps. 
+ gsn_define_colormap(wks,cmap) + delete(cmap) + end if + + res = True + res = set10Res(res) + + if (colormap.eq.1) then + res@cnFillPalette = cmap1(13:240,:) + delete(cmap1) + end if + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + res@gsnLeftString = "" ;info@longName ; could also use info@longName if we want + res@gsnCenterString = "" + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + res@lbOrientation = "Vertical" + res@lbLabelStride = 1 + res@lbLabelFontHeightF = 0.018 + res@gsnStringFontHeightF = 0.018 + res@tiMainFontHeightF = 0.022 + res@pmLabelBarHeightF = 0.40 + res@pmLabelBarWidthF = 0.085 + res@pmLabelBarOrthogonalPosF= 0.05 + + diff = var_avg_1 ; trick to maintain meta data + check = isdouble(var_avg_2) + if (check .eq. True) then + diff = var_avg_1 - doubletofloat(var_avg_2) + else + diff = var_avg_1 - var_avg_2 + end if + delete(check) + mindiff = min(diff) + maxdiff = max(diff) + + if (expFlag .eq. True) then +; use larger contour intervals for Model vs Obs + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiffObs ; contour levels + else + sd = dim_stddev(ndtooned(diff)) + + if (vars(i).eq."PREC") then + useMin1 = -2.*sd ; = mindiff + useMax1 = 2.*sd ; = maxdiff + else + useMin1 = -2.5*sd ; = mindiff + useMax1 = 2.5*sd ; = maxdiff + end if + if (c13Flag(i) .eq. 
"C13_") then + useMin1 = -40 ; hardcode boundaries to expectations + useMax1 = 0 ; hardcode boundaries to expectations + end if + delete(sd) + maxLevel = 13 + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + + res@tiMainString = cases(0) +" - "+cases(1) + res@tiMainOffsetYF = -.01 + plot(2) = gsn_csm_contour_map(wks,diff,res) + delete(res) + +; panel plot + pres@txFontHeightF = 0.02 + pres@txString = seasons(n)+" "+info@longName+" ("+useUnits+")" + +;; ASB hard code the wks height and width - need this to be a env var + wks@wkWidth = 1500 + wks@wkHeight = 1500 + + gsn_panel(wks,plot,(/1,3/),pres) ; draw panel plot + pres@txString = "" + + delete(x2_interp) + delete(var_avg_2) + delete(diff) + delete(mindiff) + delete(maxdiff) + end if ; end obsFlag + ; remove explicit labels + else ; end 3D variables +;************************************************************************* +; 4D VARIABLES (time, lev, lat, lon) +;************************************************************************* +; calculate 4D average + print("Starting 4D variables") + + var_avg_1 = dim_avg_n_Wrap(x1,0) ; time average + + do lev=0,2 ; assume both cases have same number of levels. + if ( lev .eq. 0 ) then + k = 0 + end if + if ( lev .eq. 1) then + k = 4 + end if + if ( lev .eq. 2) then + k = 9 + end if + +; open postscript file (4D) + if (isvar("wks")) then + delete(wks) + end if + wks = gsn_open_wks(plot_type,wkdir + "set10_" + seasons(n)+"_"+vars(i)+"_"+k) + if (isvar("cmap") ) then + delete(cmap) + end if + + res = True + res = set10Res(res) + + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap. 10 colors for case maps. 
+ gsn_define_colormap(wks,cmap) + delete(cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) ;read_colormap_file("ncl_default") + res@cnFillPalette = cmap1(13:240,:) + delete(cmap1) + end if + + res@gsnLeftString = info@longName ; could also use info@longName if we want + res@gsnRightString = useUnits ; assume same units for x1 and x2 + res@lbOrientation = "Vertical" + res@gsnCenterString = "Level = " + k + +; extract landfrac if paleo file + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; set case 1 titles (4D) + res@tiMainString = cases(0) + " (yrs " + yrs_ave1 + ")" + min1 = min(var_avg_1(k,:,:)) + max1 = max(var_avg_1(k,:,:)) +; if (expFlag .eq. True) then +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; " Max = "+sprintf("%6.4g",max1) +; else +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; " Max = "+sprintf("%6.4g",max1)+" (+/-"+pCut+"%)" +; end if + res@gsnLeftString = vars(i) ; could also use info@longName if we want + res@gsnRightString = useUnits ; assume same units for x1 and x2 + +; set explicit contour levels (4D) + if (expFlag .eq. 
True) then + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + st = "cn_Level"+k ; get cn levels for a specific levsoi + res@cnLevels = info@$st$ + else + x = ndtooned(var_avg_1(k,:,:)) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin1 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax1 = x(iHigh) + delete(x) + + maxLevel = 9 + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + + res@tiMainString = cases(0) + " (yrs " + yrs_ave1 + ")" + res@tiMainOffsetYF = 0.0 +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+ \ + ; " Max = "+sprintf("%6.4g",max1) + res@gsnLeftString = vars(i) ; could also use info@longName if we want + res@gsnCenterString = seasons(n) + res@gsnRightString = useUnits ; assume same units for x1 and x2 + res@lbOrientation = "horizontal" + res@pmLabelBarWidthF = 0.4 + res@pmLabelBarHeightF = 0.085 + + if (projection.eq.1) then + res@pmLabelBarOrthogonalPosF= 0.085 + else + res@pmLabelBarOrthogonalPosF= 0.05 + end if + res@lbLabelFontHeightF = 0.014 + res@gsnStringFontHeightF = 0.014 + res@tiMainFontHeightF = 0.018 + +; 4D case 1 plot + plot(0) = gsn_csm_contour_map(wks,var_avg_1(k,:,:),res) + + +;; ASB hard code the wks height and width - need this to be a env var + wks@wkWidth = 1500 + wks@wkHeight = 1500 + +; panel plot + gsn_panel(wks,plot,(/1,1/),pres) ; draw panel plot + + delete([/res,min1,max1/]) + end do ; level loop + +; plot Observed vs Model + end if ; 4d variables + delete([/var_avg_1,x1,x1_interp,plot/]) + end if ; end plot_check + if (isvar("useUnits")) then + delete(useUnits) + end if + if (isvar("lon2")) then + delete(lon2) + end if + if (isvar("lat2")) then + delete(lat2) + end if + end do ; end 
variable loop + end do ; seasons loop + + ; remove error file if program completes successfully. + filo = wkdir +"/set10_error_file" + system("/bin/rm -f " + filo) +end diff --git a/lnd_diag/model-obs/set_11.ncl b/lnd_diag/model-obs/set_11.ncl new file mode 100755 index 00000000..d3e1f7a5 --- /dev/null +++ b/lnd_diag/model-obs/set_11.ncl @@ -0,0 +1,917 @@ +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$DIAG_SHARED/$VAR_MASTER" +load "$DIAG_SHARED/lnd_func.ncl" +load "$DIAG_SHARED/find_var_with_alts.ncl" +;************************************************ +; NOTE: adapted from plot_cons.ncl (KOleson code) +; Revision Author: Nan Rosenbloom Spring 2005 +; Revision Author 2: Jan Lenaerts Spring 2017 +;************************************************ +begin + print ("=========================================") + print ("Starting: set_11.ncl") + print ("Start Time: "+systemfunc("date") ) + print ("=========================================") + wkdir = getenv("WKDIR") + ptmpdir = getenv("PTMPDIR_1") + obsdata = getenv("OBS_DATA") + obs_res = getenv("OBS_RES") + paleo = getenv("paleo") + useCommon1 = getenv("UseCommonName_1") + raster = getenv("raster") + land_mask = getenv("land_mask1") + user_defined = getenv("expContours") + cn = getenv("CN") + colormap = getenv("colormap") + projection=getenv("projection") + cn = getenv("CN") + plot_type = getenv("PLOTTYPE") + + fland_mask = stringtofloat(land_mask) + +; the set lists contains two columns of information. This comes into +; NCL as one long string that we need to parse out. 
+ tmp = stringtocharacter(asciiread(wkdir+"/master_set11.txt",-1,"string"))
+ nvar = dimsizes(tmp(:,0))
+ scale_type = new(nvar,"string")
+ vars = new(nvar,"string")
+ c13Flag = new(nvar,"string")
+ dynContour = new(nvar,"string")
+ do i=0,nvar-1
+ scale_type(i) = charactertostring(tmp(i,0:12))
+ dynContour(i) = charactertostring(tmp(i,13))
+ vars(i) = charactertostring(tmp(i,16:))
+ c13Flag(i) = charactertostring(tmp(i,16:19)) ; derived variable prefix
+ end do
+ delete(tmp)
+
+;************************************************
+; Cut off top and bottom percent to tighten contour intervals. 12/06 nanr
+;************************************************
+ pCut = 5
+ pLow = 0.05
+ pHigh = 0.95
+
+ res = True
+ res = set11Res(res)
+
+;*************************************************
+; get case names and create filenames to read in
+;*************************************************
+ nyrs = stringtointeger(getenv("clim_num_yrs_1"))
+ fyr = stringtointeger(getenv("clim_first_yr_1"))
+;; nyrs = getenv("clim_num_yrs_1")
+;; fyr = getenv("clim_first_yr_1")
+ sig_lvl = getenv("sig_lvl")
+ lyr = (fyr + nyrs)-1 ; for plotting
+ zoom = stringtointeger(getenv("reg_contour")); # (1 = SUB, 0 = GLOBAL)
+;; zoom = getenv("reg_contour"); # (1 = SUB, 0 = GLOBAL)
+ if (zoom.eq.1)then
+ min_lat = stringtofloat(getenv("min_lat"))
+ max_lat = stringtofloat(getenv("max_lat"))
+ min_lon = stringtofloat(getenv("min_lon"))
+ max_lon = stringtofloat(getenv("max_lon"))
+
+;; min_lat = getenv("min_lat")
+;; max_lat = getenv("max_lat")
+;; min_lon = getenv("min_lon")
+;; max_lon = getenv("max_lon")
+ end if
+ seasons = (/"DJF","JJA","MAM","SON","ANN"/)
+;*************************************************
+; common plot resources
+;*************************************************
+ pres = True ; panel only resources
+ pres@gsnPanelYWhiteSpacePercent = 2
+ pres@gsnMaximize = True ; make as large as possible
+ pres@gsnPaperOrientation = "portrait"
+
+; read in case strings
+ cases = new(2,string)
+ if (useCommon1 .eq. "True") then + name1 = "commonName_1" + else + name1 = "caseid_1" + end if + name11 = "prefix_1" + cases(0) = getenv(name1) + cases(1) = "Obs" + prefix = getenv(name11) + +; *************************** +; loop over seasons +; *************************** + + do n = 0,dimsizes(seasons)-1 + print("Processing season " + seasons(n)) +; pres@txString = seasons(n) + +; open model1 files + in1 = addfile(ptmpdir +"/"+prefix+"_"+seasons(n)+"_climo.nc","r") + +; open observation files + ptr_racmo = addfile(obsdata+"/RACMO2_ANT_"+seasons(n)+"_climo.nc","r") ; leaf area index + +; extract years averaged for plotting + if (isatt(in1,"yrs_averaged")) then + yrs_ave1 = in1@yrs_averaged + else + yrs_ave1 = fyr+"-"+lyr + end if + + nlev = 0 + if (isfilevar(in1,"levsoi")) then + levsoi = in1->levsoi + nlev = getfilevardimsizes(in1,"levsoi") + end if + if (isfilevar(in1,"levgrnd")) then + levsoi = in1->levgrnd + nlev = getfilevardimsizes(in1,"levgrnd") + end if + if (nlev .eq. 0) + print("FATAL ERROR: nlev = 0") + exit + end if +; truncate soil layers if needed + if (nlev .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix(0)+" nlev: " + nlev +"]") + nlev = 10 + end if + + if (isfilevar(in1,"TLAKE") .and. isfilevar(in1,"levlak") ) then + levlak1 = in1->levlak + nlevlak1 = getfilevardimsizes(in1,"levlak") + end if + lat1 = in1->lat + nlat1 = dimsizes(lat1) + lon1 = in1->lon + nlon1 = dimsizes(lon1) + +; extract landfrac if paleo file + print(paleo) + if (paleo .eq. "True") then + landfrac = in1->landfrac + printVarSummary(landfrac) + oro = new((/nlat1,nlon1/),integer) + oro = 1 + oro@_FillValue = 0 + oro = where(landfrac .gt. 0.0, 1,0) +; fname = wkdir+prefix +; fname = wkdir+"basename" +; fname = wkdir+cases(0) + fname = wkdir+getenv(name1) + paleo_outline(oro,lat1,lon1,1., fname) + res = True + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; truncate soil layers if needed + if (nlev .gt. 
10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix+" nlev: " + nlev +"]") + nlev = 10 + end if + +; loop over variables + + snowFlag = 0 + + do i=0,dimsizes(vars)-1 + if (paleo .ne. "True") then + if (snowFlag .eq. 1) then ; run SNOWDP twice to see both obs datasets + i = i-1 + end if + if (vars(i) .eq. "SNOWDP") then + snowFlag = snowFlag + 1 + end if + end if + print("Processing variable " + vars(i)) + +; read in case 1 variable + info = var_init(vars(i)) + filevar = find_var_with_alts(in1, vars(i), info) +; now filevar is the name of the variable in the file (either +; vars(i) or one of its alternate names); if this variable was +; not found in the file, then filevar will be "" + + plot_check = True + if (isvar("x1")) then + delete(x1) + end if + + if (filevar .ne. "")then + if (c13Flag(i) .eq. "C13_") then + x1 = derive_del13C(vars(i),in1,scale_type(i),0,2) + else + x1 = in1->$filevar$ + end if + plot_check = True + delete(filevar) + else + if (info@derivedVariable .eq. True) then + x1 = derive_land( vars(i),in1) + else + print("variable "+ vars(i)+ " is not a defined variable.") + plot_check = False + continue + end if + end if +; catch NaNs + x1 = catchNaN(x1) + + if (all(ismissing(x1)))then + print("variable "+ vars(i)+ " contains all missing values.") + plot_check = False + delete(x1) + end if + + if (plot_check .eq. False) then + continue + else +; read in observations (if present) + obsFlag = 0 ; (0 = on; 1 = off) + if (isvar("x2") ) then + delete(x2) + end if + if (paleo .ne. "True") then + if (vars(i) .eq. "ASA" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2*100. ; fraction to % + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "QSOIL" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. 
"U10" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FGR" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FLDS" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FIRE" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2 * -1. ; defined negative in RACMO2.4 + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "RAIN" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "PBOT" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "Q2M" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "QICE_FRZ" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "RH2M" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2 * 100. ; fraction to % + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "QICE_MELT" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FSH" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2 * -1. ; defined neg. in CESM + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "QICE" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. 
"SNOW" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "QSNOMELT" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FSDS" ) then + x2 = ptr_racmo->$vars(i)$ + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FSR" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2 * -1. ; defined as negative in RACMO2.4 + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "TSA" ) then + x2 = ptr_racmo->$vars(i)$ + x2 = x2 - 273.16 ; K to C + lon2 = ptr_racmo->lon + lat2 = ptr_racmo->lat + cases(1) = "RACMO2.4" + obsFlag = 1; + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if ; paleo + + if (paleo .eq. "True") then + obsFlag = 0 + printVarSummary(obsFlag) + end if + + if (obsFlag .eq. 1) then + nlat2 = dimsizes(lat2) + nlon2 = dimsizes(lon2) + end if + +; get info for @ varb and scale + info = var_init(vars(i)) + x1 = scale_var(x1,scale_type(i),info) + + if (x1@units .eq. "K") then + x1 = x1 - 273.15 ; convert K to C + useUnits = "C" + else + useUnits = x1@units + end if + + print("processing variable: " + vars(i) ) + +; process observations if present + if (obsFlag .eq. 1) ; (0 = on; 1 = off) + ; flip longitudes to -180 to 180 if needed + if (min(x2&lon) .ge. 0 .and. min(x1&lon) .lt. 0) then + x2 = lonFlip(x2) + end if + if (min(x1&lon) .ge. 0 .and. min(x2&lon) .lt. 0) then + x1 = lonFlip(x1) + end if + + ; interpolate data to new grid if necessary + if (nlat1 .ne. nlat2) then + if (nlat1 .gt. 
nlat2) then ; interpolate x1 to x2 grid + x1_interp = linint2_Wrap(lon1,lat1,x1,True,lon2,lat2,0) + x2_interp = x2 + else + x2_interp = linint2_Wrap(lon2,lat2,x2,True,lon1,lat1,0) + x1_interp = x1 + end if + else + x1_interp = x1 + x2_interp = x2 + end if +; different plots for observations + plot = new(3,graphic) + else + x1_interp = x1 + plot = new(1,graphic) + end if ; end observations + +; calculate size + rank = dimsizes(dimsizes(x1)) +;******************************************************************* +; 3D VARIABLES (time, lat, lon) +;******************************************************************* + if (rank.eq.3)then +; open postscript file and choose colormap + if (isvar("wks")) then + delete(wks) + end if + if (vars(i) .eq. "SNOWDP") then + if (snowFlag .eq. 1) then + wks = gsn_open_wks(plot_type,wkdir + "set11_" + seasons(n) + "_" + vars(i) + "_FOSTERDAVY") + else + wks = gsn_open_wks(plot_type,wkdir + "set11_" + seasons(n) + "_" + vars(i) + "_CMC") + end if + else + wks = gsn_open_wks(plot_type,wkdir + "set11_" + seasons(n) + "_" + vars(i)) + end if + if (isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap. 10 colors for case maps. + gsn_define_colormap(wks,cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) + end if +; calculate time average + if (obsFlag .eq. 1) then ; (1 = obs; 0 = no obs) + x2_interp = mask(x2_interp, (x1_interp(0,:,:) .eq. 
x1_interp@_FillValue), False) ; remove non-land points + var_avg_2 = x2_interp + end if + var_avg_1 = dim_avg_n_Wrap(x1_interp,0) ; time average + min1 = min(var_avg_1) + max1 = max(var_avg_1) +;************************************************************************** +; Set contour Levels: Dynamic vs pre-defined contours +;************************************************************************** +; dynContour: 1=dynamic; 0=explicit + if (info@cn_Explicit .eq. True .and. dynContour(i) .eq. 0 .and. user_defined .eq. 1) then + expFlag = True + else + expFlag = False + end if + res = True + res = set11Res(res) + if (colormap.eq.1) then + res@cnFillPalette = cmap1(13:240,:) + end if + if (min1.eq.max1) then + delete(res@cnLabelBarEndStyle) + end if + + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if +; plot Model (no obs) + if (obsFlag .eq. 0) then ; (1 = obs; 0 = no obs) +; set case 1 titles + if (expFlag .eq. True) then + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_Levels ; contour levels + else + if (c13Flag(i) .eq. "C13_") then + useMin1 = -40 ; hardcode boundaries to expectations + useMax1 = 0 ; hardcode boundaries to expectations + else + x = ndtooned(var_avg_1) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin1 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax1 = x(iHigh) + delete(x) + end if + maxLevel = 14 + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + + res@tiMainString = cases(0) + " (yrs " + yrs_ave1 + ")" + res@tiMainOffsetYF = 0.0 +; if (expFlag .eq. 
True) then +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; " Max = "+sprintf("%6.4g",max1) +; else +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; " Max = "+sprintf("%6.4g",max1)+" (+/-"+pCut+"%)" +; end if + res@gsnLeftString = vars(i) ; could also use info@longName if we want + res@gsnCenterString = seasons(n) + res@gsnRightString = useUnits ; assume same units for x1 and x2 + res@lbOrientation = "horizontal" + res@pmLabelBarWidthF = 0.4 + res@pmLabelBarHeightF = 0.085 + if (projection.eq.1) then + res@pmLabelBarOrthogonalPosF= 0.085 + else + res@pmLabelBarOrthogonalPosF= 0.05 + end if + res@lbLabelFontHeightF = 0.014 + res@gsnStringFontHeightF = 0.014 + res@tiMainFontHeightF = 0.018 + +; case 1 plot + plot = gsn_csm_contour_map(wks,var_avg_1,res) + + pres@txFontHeightF = res@tiMainFontHeightF + .003 + gsn_panel(wks,plot,(/1,1/),pres) ; draw panel plot + + delete(res) + delete(min1) + delete(max1) +; OBSERVATION PLOTS ----------------------------------------------------------------------------------------- + else ; model + observations +; ----------------------------------------------------------------------------------------------------------- +; panel 1 plot: case 1 plot + res = True + res = set11Res(res) + + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + min1 = doubletofloat(min(var_avg_1)) + max1 = doubletofloat(max(var_avg_1)) + + if (expFlag .eq. True) then + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_Levels ; contour levels + else + x = ndtooned(var_avg_1) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + + qsort(x) + iLow = floattointeger( pLow*nGood) + useMin1 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax1 = x(iHigh) + delete(x) + + maxLevel = 9 +;; added 2016/01/29 + if (vars(i) .eq. "SNOWDP") then + useMin12 = 0. + useMax12 = 1. + end if + if (vars(i) .eq. "H2OSNO") then + useMin12 = 0. 
+ useMax12 = 500. + end if + + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMax1) + delete(useMin1) + end if + + res@gsnDraw = False ; for panel plots + res@lbLabelStride = 1 + res@lbLabelFontHeightF = 0.018 + res@gsnStringFontHeightF = 0.018 + res@tiMainFontHeightF = 0.022 + res@pmLabelBarHeightF = 0.4 + res@pmLabelBarWidthF = 0.085 + res@pmLabelBarOrthogonalPosF= 0.05 + + res@tiMainString = cases(0) + " (yrs " + yrs_ave1 + ")" + res@tiMainOffsetYF = -0.01 + res@gsnLeftString = "" ; info@longName ; could also use info@longName if we want + res@gsnCenterString = "" + res@gsnRightString = "" ; useUnits ; assume same units for x1 and x2 + res@lbOrientation = "Vertical" + delete(min1) + delete(max1) + + plot(0) = gsn_csm_contour_map(wks,var_avg_1,res) +; save settings to plot obs + +; panel 2 plot: case 2 plot + + check = isdouble(var_avg_2) + if (check .eq. True) then + min1 = doubletofloat(min(var_avg_2)) + max1 = doubletofloat(max(var_avg_2)) + else + min1 = min(var_avg_2) + max1 = max(var_avg_2) + end if + delete(check) + res@tiMainString = cases(1) +; if (expFlag .eq. True) then +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; "~C~Max = "+sprintf("%6.4g",max1) +; else +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; "~C~Max = "+sprintf("%6.4g",max1)+" (+/-"+pCut+"%)" +; end if + delete(min1) + delete(max1) + + plot(1) = gsn_csm_contour_map(wks,var_avg_2,res) + delete(res) + +; panel 3 difference plot + + if ( isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag12.rgb") ; read in colormap. 12 colors for diff maps. 
+ gsn_define_colormap(wks,cmap) + delete(cmap) + end if + + res = True + res = set11Res(res) + + if (colormap.eq.1) then + res@cnFillPalette = cmap1(13:240,:) + delete(cmap1) + end if + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + res@gsnLeftString = "" ;info@longName ; could also use info@longName if we want + res@gsnCenterString = "" + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + res@lbOrientation = "Vertical" + res@lbLabelStride = 1 + res@lbLabelFontHeightF = 0.018 + res@gsnStringFontHeightF = 0.018 + res@tiMainFontHeightF = 0.022 + res@pmLabelBarHeightF = 0.40 + res@pmLabelBarWidthF = 0.085 + res@pmLabelBarOrthogonalPosF= 0.05 + + diff = var_avg_1 ; trick to maintain meta data + check = isdouble(var_avg_2) + if (check .eq. True) then + diff = var_avg_1 - doubletofloat(var_avg_2) + else + diff = var_avg_1 - var_avg_2 + end if + delete(check) + mindiff = min(diff) + maxdiff = max(diff) + + if (expFlag .eq. True) then +; use larger contour intervals for Model vs Obs + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiffObs ; contour levels + else + sd = dim_stddev(ndtooned(diff)) + + if (vars(i).eq."PREC") then + useMin1 = -2.*sd ; = mindiff + useMax1 = 2.*sd ; = maxdiff + else + useMin1 = -2.5*sd ; = mindiff + useMax1 = 2.5*sd ; = maxdiff + end if + if (c13Flag(i) .eq. 
"C13_") then + useMin1 = -40 ; hardcode boundaries to expectations + useMax1 = 0 ; hardcode boundaries to expectations + end if + delete(sd) + maxLevel = 13 + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + + res@tiMainString = cases(0) +" - "+cases(1) + res@tiMainOffsetYF = -.01 + plot(2) = gsn_csm_contour_map(wks,diff,res) + delete(res) + +; panel plot + pres@txFontHeightF = 0.02 + pres@txString = seasons(n)+" "+info@longName+" ("+useUnits+")" + +;; ASB hard code the wks height and width - need this to be a env var + wks@wkWidth = 1500 + wks@wkHeight = 1500 + + gsn_panel(wks,plot,(/1,3/),pres) ; draw panel plot + pres@txString = "" + + delete(x2_interp) + delete(var_avg_2) + delete(diff) + delete(mindiff) + delete(maxdiff) + end if ; end obsFlag + ; remove explicit labels + else ; end 3D variables +;************************************************************************* +; 4D VARIABLES (time, lev, lat, lon) +;************************************************************************* +; calculate 4D average + print("Starting 4D variables") + + var_avg_1 = dim_avg_n_Wrap(x1,0) ; time average + + do lev=0,2 ; assume both cases have same number of levels. + if ( lev .eq. 0 ) then + k = 0 + end if + if ( lev .eq. 1) then + k = 4 + end if + if ( lev .eq. 2) then + k = 9 + end if + +; open postscript file (4D) + if (isvar("wks")) then + delete(wks) + end if + wks = gsn_open_wks(plot_type,wkdir + "set11_" + seasons(n)+"_"+vars(i)+"_"+k) + if (isvar("cmap") ) then + delete(cmap) + end if + + res = True + res = set11Res(res) + + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap. 10 colors for case maps. 
+ gsn_define_colormap(wks,cmap) + delete(cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) ;read_colormap_file("ncl_default") + res@cnFillPalette = cmap1(13:240,:) + delete(cmap1) + end if + + res@gsnLeftString = info@longName ; could also use info@longName if we want + res@gsnRightString = useUnits ; assume same units for x1 and x2 + res@lbOrientation = "Vertical" + res@gsnCenterString = "Level = " + k + +; extract landfrac if paleo file + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; set case 1 titles (4D) + res@tiMainString = cases(0) + " (yrs " + yrs_ave1 + ")" + min1 = min(var_avg_1(k,:,:)) + max1 = max(var_avg_1(k,:,:)) +; if (expFlag .eq. True) then +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; " Max = "+sprintf("%6.4g",max1) +; else +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+\ +; " Max = "+sprintf("%6.4g",max1)+" (+/-"+pCut+"%)" +; end if + res@gsnLeftString = vars(i) ; could also use info@longName if we want + res@gsnRightString = useUnits ; assume same units for x1 and x2 + +; set explicit contour levels (4D) + if (expFlag .eq. 
True) then + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + st = "cn_Level"+k ; get cn levels for a specific levsoi + res@cnLevels = info@$st$ + else + x = ndtooned(var_avg_1(k,:,:)) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin1 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax1 = x(iHigh) + delete(x) + + maxLevel = 9 + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + + res@tiMainString = cases(0) + " (yrs " + yrs_ave1 + ")" + res@tiMainOffsetYF = 0.0 +; res@lbTitleString = "Min = "+sprintf("%6.4g",min1)+ \ + ; " Max = "+sprintf("%6.4g",max1) + res@gsnLeftString = vars(i) ; could also use info@longName if we want + res@gsnCenterString = seasons(n) + res@gsnRightString = useUnits ; assume same units for x1 and x2 + res@lbOrientation = "horizontal" + res@pmLabelBarWidthF = 0.4 + res@pmLabelBarHeightF = 0.085 + + if (projection.eq.1) then + res@pmLabelBarOrthogonalPosF= 0.085 + else + res@pmLabelBarOrthogonalPosF= 0.05 + end if + res@lbLabelFontHeightF = 0.014 + res@gsnStringFontHeightF = 0.014 + res@tiMainFontHeightF = 0.018 + +; 4D case 1 plot + plot(0) = gsn_csm_contour_map(wks,var_avg_1(k,:,:),res) + + +;; ASB hard code the wks height and width - need this to be a env var + wks@wkWidth = 1500 + wks@wkHeight = 1500 + +; panel plot + gsn_panel(wks,plot,(/1,1/),pres) ; draw panel plot + + delete([/res,min1,max1/]) + end do ; level loop + +; plot Observed vs Model + end if ; 4d variables + delete([/var_avg_1,x1,x1_interp,plot/]) + end if ; end plot_check + if (isvar("useUnits")) then + delete(useUnits) + end if + if (isvar("lon2")) then + delete(lon2) + end if + if (isvar("lat2")) then + delete(lat2) + end if + end do ; end 
variable loop + end do ; seasons loop + + ; remove error file if program completes successfully. + filo = wkdir +"/set11_error_file" + system("/bin/rm -f " + filo) +end diff --git a/lnd_diag/model-obs/set_12.ncl b/lnd_diag/model-obs/set_12.ncl new file mode 100755 index 00000000..9feb9419 --- /dev/null +++ b/lnd_diag/model-obs/set_12.ncl @@ -0,0 +1,1452 @@ +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$DIAG_SHARED/$VAR_MASTER" +load "$DIAG_SHARED/lnd_func.ncl" +;************************************************ +; NOTE: Adapted from plot_tpr.ncl, plot_eb.ncl, +; plot_fire.ncl (KOleson code). +; Revision Author: Nan Rosenbloom Spring 2005 +;************************************************ + +begin + print ("=========================================") + print ("Starting: set_12.ncl") + print ("Start Time: "+systemfunc("date") ) + print ("=========================================") + inputdir = getenv("INPUT_FILES") + resdir = getenv("DIAG_RESOURCES") + diag_home = getenv("DIAG_HOME") + obsdata = getenv("OBS_DATA") + plot_type = getenv("PLOTTYPE") + cn = getenv("CN") + clamp = getenv("CLAMP") + casa = getenv("CASA") + hydroFlag = getenv("HYDRO") + paleo = getenv("paleo") + land_mask = getenv("land_mask1") + flandmask = stringtofloat(land_mask) + useCommon1 = getenv("UseCommonName_1") + +;************************************************* +; get case names and create filenames to read in +;************************************************* + wkdir = getenv("WKDIR") + ptmpdir = getenv("PTMPDIR_1") +;; fyr = stringtointeger(getenv("clim_first_yr_1")) + fyr = getenv("clim_first_yr_1") + +;*********************************************** +; import regional information +;*********************************************** + + secondsPerDay = 86400. 
+ months = 12 ; number of months + regions_file = addfile(resdir+"/region_definitions.nc","r") + box_n = regions_file->BOX_N + box_w = regions_file->BOX_W + box_e = regions_file->BOX_E + box_s = regions_file->BOX_S + nreg = dimsizes(box_s) ; number of regions + tmp = regions_file->PS_ID + ps_id = charactertostring(tmp) + delete(tmp) + tmp = regions_file->PTITSTR + ptitstr = charactertostring(tmp) + delete(tmp) + tmp = regions_file->BOXSTR + boxstr = charactertostring(tmp) + delete(tmp) + + +;************************************************* +; main loop +;************************************************* + +nplots = 9 +nmons = 13 + +; loop over plots +do p=0,nplots-1 + + print("Now printing plot #" + p) + if (p .eq. 0) then + if (cn .eq. 1) then + fileName = "set12_cn_landFlx.txt" + else + fileName = "set12_landFlx.txt" + end if + plot_name = "landf" + titstr = new(7,"string") + titstr(0) = "2m Air Temperature (~S~o~N~K)" + titstr(1) = "Precipitation (mm day~S~-1~N~)" + titstr(2) = "Runoff (mm day~S~-1~N~)" + titstr(3) = "Snow Depth (m)" + titstr(4) = "Latent Heat Flux (W m~S~-2~N~)" + titstr(5) = "Photosynthesis (gC m~S~-2~N~day~S~-1~N~)" + titstr(6) = "Leaf Area Index" + end if + if (p .eq. 1) then + fileName = "set12_radFlx.txt" + plot_name = "radf" + titstr = new(7,"string") + titstr(0) = "Incoming Solar" + titstr(1) = "Albedo" + titstr(2) = "Absorbed Solar" + titstr(3) = "Incoming Longwave" + titstr(4) = "Emitted Longwave" + titstr(5) = "Net Longwave" + titstr(6) = "Net Radiation" + end if + if (p .eq. 2) then + fileName = "set12_turbFlx.txt" + plot_name = "turbf" + titstr = new(10,"string") + titstr(0) = "Net Radiation" + titstr(1) = "Sensible Heat" + titstr(2) = "Latent Heat" + titstr(3) = "Transpiration" + titstr(4) = "Canopy Evaporation" + titstr(5) = "Ground Evaporation" + titstr(6) = "Ground Heat + Snow Melt" + titstr(7) = "Soil moisture factor (BTRAN)" + titstr(8) = "Evaporative Fraction" + titstr(9) = "Total LAI" + end if + if (p .eq. 
3) then
+ if (clamp .eq. 1) then
+ fileName = "set12_clampFlx.txt"
+ plot_name = "cnFlx"
+ titstr = new(6,"string")
+ titstr(0) = "Net Ecosystem Exchange"
+ titstr(1) = "GPP"
+ titstr(2) = "NPP"
+ titstr(3) = "Autotrophic Respiration"
+ titstr(4) = "Heterotrophic Respiration"
+ titstr(5) = "Net Ecosystem Production"
+ else
+ if (cn .eq. 1) then
+ fileName = "set12_cnFlx.txt"
+ plot_name = "cnFlx"
+ titstr = new(10,"string")
+ titstr(0) = "Net Ecosystem Exchange"
+ titstr(1) = "GPP"
+ titstr(2) = "NPP"
+ titstr(3) = "Autotrophic Respiration"
+ titstr(4) = "Heterotrophic Respiration"
+ titstr(5) = "Ecosystem Respiration"
+ titstr(6) = "Surface CH4 Flux"
+ titstr(7) = "Leached Mineral Nitrogen"
+ titstr(8) = "Soil NO3 Pool Loss to Leaching"
+ titstr(9) = "Soil NO3 Pool Loss to Runoff"
+ else
+ if (casa .eq. 1) then
+ fileName = "set12_casaFlx.txt"
+ plot_name = "cnFlx"
+ titstr = new(5,"string")
+ titstr(0) = "Net Ecosystem Exchange"
+ titstr(1) = "GPP"
+ titstr(2) = "NPP"
+ titstr(3) = "Autotrophic Respiration"
+ titstr(4) = "Heterotrophic Respiration"
+ else
+ continue
+ end if
+ end if
+ end if
+ end if
+ if (p .eq. 4) then
+ if (cn .eq. 1) then
+ fileName = "set12_fireFlx.txt"
+ plot_name = "frFlx"
+ titstr = new(6,"string")
+ titstr(0) = "Column-Level Fire C Loss"
+ titstr(1) = "Column-Level Fire N Loss"
+ titstr(2) = "PFT-Level Fire C Loss"
+ titstr(3) = "PFT-Level Fire N Loss"
+ titstr(4) = "Fractional Area Burned"
+ titstr(5) = "Fractional Area Burned - Crop"
+ else
+ continue
+ end if
+ end if
+ if (p .eq. 5) then
+ fileName = "set12_moistEnergyFlx.txt"
+ plot_name = "moistEnergyFlx"
+ titstr = new(3,"string")
+ titstr(0) = "Precipitation"
+ titstr(1) = "Net Radiation"
+ titstr(2) = "Evapotranspiration"
+
+ end if
+ if (p .eq. 6) then
+ if (hydroFlag .eq.
1) then + fileName = "set12_hydro.txt" + plot_name = "hydro" + titstr = new(5,"string") + titstr(0) = "Water in Unconfined Aquifer" ; WA + titstr(1) = "Water Table Depth" ; ZWT + titstr(2) = "Aquifer Recharge Rate" ; QCHARGE + titstr(3) = "Frac Water Table at Surface" ; FCOV + titstr(4) = "Total Water Storage" ; TWS + else + continue + end if + end if + if (p .eq. 7) then + fileName = "set12_snow.txt" + plot_name = "snow" + if (paleo .eq. "True") then ; paleo run, no obs + titstr = new(4,"string") + titstr(0) = "Snow height" ; SNOWDP + titstr(1) = "Fractional Snow Cover" ; FSNO + titstr(2) = "Snow Water Equivalent" ; H2OSNO + else ; obs + titstr = new(4,"string") + titstr(0) = "Snow height (USAF/CMC)" ; SNOWDP + titstr(1) = "Fractional Snow Cover (NOAA-AVHRR)"; FSNO + titstr(2) = "Snow Water Equivalent (CMC)" ; H2OSNO + end if + end if + if (p .eq. 8) then + fileName = "set12_albedo.txt" + plot_name = "albedo" + titstr = new(5,"string") + titstr(0) = "Visible BlackSky Albedo" ; VBSA + titstr(1) = "NearIR BlackSky Albedo" ; NBSA + titstr(2) = "Visible WhiteSky Albedo" ; VWSA + titstr(3) = "NearIR WhiteSky Albedo" ; NWSA + titstr(4) = "All Sky Albedo" ; ASA + end if +;---------------------------------------------------------- +; read in file and parse into variables +;---------------------------------------------------------- + tmp = stringtocharacter(asciiread(inputdir+"/"+fileName,-1,"string")) + nvars = dimsizes(tmp(:,0)) + nplotsPerCol = (nvars+1)/2 + scale_type = new(nvars,"string") + vars = new(nvars,"string") + vcheck1 = new(nvars,"string") + do i=0,nvars-1 + scale_type(i) = charactertostring(tmp(i,0:12)) + vars(i) = charactertostring(tmp(i,16:)) + end do + delete(tmp) + + plot_check = True ; 20dec04 - nanr; added so that plot_check would be defined for underived vars. + + +; Define common plots resources -------- + if (p .eq. 
5) + plot = new(1,graphic) + else + plot = new(nvars,graphic) + end if + + res = True + res@xyLineThicknesses = (/2.,2.,2./) + res@xyDashPatterns = (/0.,16.,16./) ; solid, dashed + res@xyLineColors = (/"red","blue","black"/) + res@tmXBMinorOn = False + res@tiMainOn = False + res@gsnDraw = False + res@gsnFrame = False + res@tiXAxisFontHeightF = 0.030 + res@tiYAxisFontHeightF = 0.030 + res@txFontHeightF = 0.030 + ;res@tmXBLabelFontHeightF = 0.030 + res@tmYLLabelFontHeightF = 0.030 + res@tmYLFormat = "0@!;*?5g" + + +; creating x-axis labels + month = ispan(1,months+1,1) + month!0 = "month" + res@trXMinF = 1 + res@trXMaxF = months+1 + res@tmXBMode = "Explicit" + res@tmXBValues = month + res@tmXBLabels = (/"J","F","M","A","M","J","J","A","S","O","N","D","J"/) + res@pmLegendDisplayMode = "Never" + res@vpHeightF = 0.4 + res@vpWidthF = 0.8 + + vv = ispan(0,nvars-1,1) + mm = ispan(0,months-1,1) + +;****************************** +; Read model data +;****************************** + cases = new(4,"string") + if (useCommon1 .eq. "True") then + name1 = "commonName_1" + else + name1 = "caseid_1" + end if + cases(0) = getenv(name1) + cases(1) = "No Obs" + name11 = "prefix_1" + prefix = getenv(name11) + in1 = addfile(ptmpdir+"/"+prefix+"MONS_climo.nc","r") + print ("DEBUG file in1 = "+ptmpdir+"/"+prefix+"MONS_climo.nc") + if(isfilevar(in1,"levsoi")) then + nlev1 = getfilevardimsizes(in1,"levsoi") + end if + if(isfilevar(in1,"levgrnd")) then + nlev1 = getfilevardimsizes(in1,"levgrnd") + end if + fland1 = in1->landfrac + area1 = in1->area + time1 = in1->time + lat1 = in1->lat + lon1 = in1->lon + nlat1 = dimsizes(lat1) + nlon1 = dimsizes(lon1) + if (isatt(in1,"yrs_averaged")) then + yrs_ave1 = in1@yrs_averaged + cases(0) = cases(0)+" (yrs "+yrs_ave1+")" + end if + if (lon1(0) .ge. 0.) then + area1 = lonFlip(area1) + fland1 = lonFlip(fland1) + end if + + +;****************************** +; Read observed data +;****************************** + + if (p .eq. 
0) then ; read observations for landf variables + + ; T, P, and runoff at 05 degree + ; these variables read in a datamask and weights from their nc file. + + ptr_wm = addfile(obsdata+"/0_5_WILLMOTT_ALLMONS_climo.nc","r") ; temp,precip + case2 = ptr_wm@case_id + lat2 = ptr_wm->lat + nlat2 = dimsizes(lat2) + lon2 = ptr_wm->lon + nlon2 = dimsizes(lon2) + ptr_grdc = addfile(obsdata+"/0_5_GRDC_ALLMONS_climo.nc","r") ; runoff + case2_r = ptr_grdc@case_id + ptr_fd = addfile(obsdata+"/0_5_FOSTERDAVY_ALLMONS_climo.nc","r") ; snowdepth + case2_sd = ptr_fd@case_id + + landmask2 = new((/nvars,nlat2,nlon2/),"double") + area2 = new((/nvars,nlat2,nlon2/),"double") + + ; 1x1 degree = landmask3 - snow data is at 1 degree + + ptr_scf = addfile(obsdata+"/NOAA_AVHRR_SNOWF_ALLMONS_climo.100318.nc","r") ; snow cover + ptr_sd = addfile(obsdata+"/CMC_SNOWD_ALLMONS_climo.100318.nc","r") ; snow depth + ptr_swe = addfile(obsdata+"/CMC_SWE_ALLMONS_climo.100318.nc","r") ; swe + + lat3 = ptr_sd->lat + nlat3 = dimsizes(lat3) + lon3 = ptr_sd->lon + nlon3 = dimsizes(lon3) + + landmask3 = new((/nvars,nlat3,nlon3/),"double") + area3 = new((/nvars,nlat3,nlon3/),"double") + + ; 0.9x1.25 degree = landmask5 - lhf and gpp data is at 0.9x.125 degree + + ptr_lhf = addfile(obsdata+"/MR_LHF_0.9x1.25_ALLMONS_climo.nc","r") ; lhf + ptr_gpp = addfile(obsdata+"/MR_GPP_0.9x1.25_ALLMONS_climo.nc","r") ; gpp + + lat5 = ptr_lhf->lat + nlat5 = dimsizes(lat5) + lon5 = ptr_lhf->lon + nlon5 = dimsizes(lon5) + + landmask5 = new((/nvars,nlat5,nlon5/),"double") + area5 = new((/nvars,nlat5,nlon5/),"double") + + ptr_lai = addfile(obsdata+"/MODIS_LAI_ALLMONS_climo.nc","r") ; lai + + lat7 = ptr_lai->lat + nlat7 = dimsizes(lat7) + lon7 = ptr_lai->lon + nlon7 = dimsizes(lon7) + + landmask7 = new((/nvars,nlat7,nlon7/),"float") + area7 = new((/nvars,nlat7,nlon7/),"float") + + landmask2!0 = "var" + landmask2&var = vv + landmask2!1 = "lat" + landmask2&lat = lat2 + landmask2!2 = "lon" + landmask2&lon = lon2 + area2!0 = "var" + 
area2&var = vv + area2!1 = "lat" + area2&lat = lat2 + area2!2 = "lon" + area2&lon = lon2 + + landmask3!0 = "var" + landmask3&var = vv + landmask3!1 = "lat" + landmask3!2 = "lon" + landmask3&lat = lat3 + landmask3&lon = lon3 + area3!0 = "var" + area3&var = vv + area3!1 = "lat" + area3&lat = lat3 + area3!2 = "lon" + area3&lon = lon3 + + landmask5!0 = "var" + landmask5&var = vv + landmask5!1 = "lat" + landmask5!2 = "lon" + landmask5&lat = lat5 + landmask5&lon = lon5 + area5!0 = "var" + area5&var = vv + area5!1 = "lat" + area5&lat = lat5 + area5!2 = "lon" + area5&lon = lon5 + + landmask7!0 = "var" + landmask7&var = vv + landmask7!1 = "lat" + landmask7!2 = "lon" + landmask7&lat = lat7 + landmask7&lon = lon7 + area7!0 = "var" + area7&var = vv + area7!1 = "lat" + area7&lat = lat7 + area7!2 = "lon" + area7&lon = lon7 + + end if + if (p .eq. 8) then + ; 64x128 degree = landmask4 - albedo data is at T42 + ptr_alb = addfile(obsdata+"/T42_MODIS_ALLMONS_climo.070523.nc","r") ; albedo + + ; 64x128 degree = landmask4 - albedo data is at T42 + ptr_asa = addfile(obsdata+"/modisradweighted.nc","r") ; ASA albedo + + lat4 = ptr_alb->lat + nlat4 = dimsizes(lat4) + lon4 = ptr_alb->lon + nlon4 = dimsizes(lon4) + + landmask4 = new((/nvars,nlat4,nlon4/),"double") + area4 = new((/nvars,nlat4,nlon4/),"double") + landmask4!0 = "var" + landmask4&var = vv + landmask4!1 = "lat" + landmask4!2 = "lon" + landmask4&lat = lat4 + landmask4&lon = lon4 + area4!0 = "var" + area4&var = vv + area4!1 = "lat" + area4&lat = lat4 + area4!2 = "lon" + area4&lon = lon4 + array4 = new((/nvars, months, nlat4, nlon4/),"double") ; T42 + + end if + + sFlag = new((/nvars/),"integer") + aFlag = new((/nvars/),"integer") + sFlag = 0 + aFlag = 0 +;************************** +; Variable loop ----------- read both cases of each variable into memeory. 
+;************************** + array1 = new((/nvars, months, nlat1, nlon1/),"double") ; T42 + array2 = new((/nvars, months, nlat2, nlon2/),"double") ; 05 degree + array3 = new((/nvars, months, nlat3, nlon3/),"double") ; 1 degree + array5 = new((/nvars, months, nlat5, nlon5/),"double") ; 0.9x1.25 degree + array7 = new((/nvars, months, nlat7, nlon7/),"float") ; 0.5 degree + + + nv = nvars+1 + units = new(nvars,"string") + long_name = new(nvars,"string") + +; read all variables into memory first and stuff them into two arrays (2 cases) + pcheck1 = 0 + snowFlag = 0 + do i=0,nvars-1 + + print("vars = " + vars(i) ) + if (paleo .ne. "True") then + if (vars(i) .eq. "SNOWDP") then + snowFlag = snowFlag + 1 + end if + end if + +; check for variables in case 1 + vcheck1(i) = True + info = var_init(vars(i)) + if(isfilevar(in1,vars(i)))then + x1 = in1->$vars(i)$ + plot_check = True + else + if (info@derivedVariable .eq. True) then + x1 = derive_land( vars(i),in1) + else + print("variable "+ vars(i)+ " is not a defined variable in case1.") + pcheck1 = pcheck1 + 1 + vcheck1(i) = False + end if + end if + + if (pcheck1 .eq. nvars) then + plot_check = False + end if + + +;read in observations (if present) + obsFlag = 0 ; (1 = on; 0 = off) + if (paleo .ne. "True") then + if (vars(i) .eq. "TSA" ) then + x2 = ptr_wm->$vars(i)$ + l2 = ptr_wm->datamask + a2 = ptr_wm->weight + cases(1) = "Willmott-Matsuura(T,P),GRDC(R),USAF/ETAC(S),FLUXNET(LHF,GPP)" + obsFlag = 1 + sFlag(i) = 0 + aFlag(i) = 0 + else + if (vars(i) .eq. "PREC" .and. p .ne. 5 ) then + x2 = ptr_wm->PREC + l2 = ptr_wm->datamask + a2 = ptr_wm->weight + obsFlag = 1 + cases(1) = "Willmott-Matsuura(T,P),GRDC(R),USAF/ETAC(S),FLUXNET(LHF,GPP)" + sFlag(i) = 0 + aFlag(i) = 0 + else + if (vars(i) .eq. "TOTRUNOFF" ) then + x2 = ptr_grdc->RUNOFF + l2 = ptr_grdc->datamask + a2 = ptr_grdc->weight + obsFlag = 1 + cases(1) = "Willmott-Matsuura(T,P),GRDC(R),USAF/ETAC(S)" + sFlag(i) = 0 + aFlag(i) = 0 + else + if (vars(i) .eq. "SNOWDP" .and. 
snowFlag .eq. 1) then + if (isvar("x2")) then + delete(x2) + end if + x2 = ptr_fd->$vars(i)$ + l2 = ptr_fd->datamask + a2 = ptr_fd->weight + cases(1) = "FOSTERDAVY" + x2@_FillValue = 1e30 + obsFlag = 1 + sFlag(i) = 0 + aFlag(i) = 0 + else + if (vars(i) .eq. "FSNO" ) then + if (isvar("x3")) then + delete(x3) + end if + x3 = ptr_scf->SCF + x3 = x3 * 0.01 ; convert from percent to 0-1 + l3 = ptr_scf->landmask + lf = ptr_scf->landfrac + a3 = ptr_scf->weight + cases(2) = "NOAA AVHRR (1967-2003)" + x3@_FillValue = 1e30 + obsFlag = 1 + sFlag(i) = 1 + aFlag(i) = 0 + else + if (vars(i) .eq. "H2OSNO" ) then + if (isvar("x3")) then + delete(x3) + end if + x3 = ptr_swe->SWE + l3 = ptr_swe->landmask + lf = ptr_swe->landfrac + a3 = ptr_swe->weight + cases(3) = "CMC (1980-1996)" + x3@_FillValue = 1e30 + obsFlag = 1 + sFlag(i) = 1; + aFlag(i) = 0; + else + if (vars(i) .eq. "SNOWDP" .and. snowFlag .eq. 2) then + if (isvar("x3")) then + delete(x3) + end if + x3 = ptr_sd->SNOWD + l3 = ptr_sd->landmask + lf = ptr_sd->landfrac + a3 = ptr_sd->weight + cases(3) = "CMC (1980-1996)" + x3@_FillValue = 1e30 + obsFlag = 1 + sFlag(i) = 1 + aFlag(i) = 0 + else + if (vars(i) .eq. "VBSA") then + if (isvar("x4")) then + delete(x4) + end if + x4 = ptr_alb->VBSA + cases(3) = "MODIS (2001-2003)" + l4 = ptr_alb->LANDFRAC + a4 = ptr_alb->weight + x4@_FillValue = 1e30 + obsFlag = 1; + sFlag(i) = 0; + aFlag(i) = 1; + else + if (vars(i) .eq. "NBSA") then + if (isvar("x4")) then + delete(x4) + end if + x4 = ptr_alb->NBSA + cases(3) = "MODIS (2001-2003)" + l4 = ptr_alb->LANDFRAC + a4 = ptr_alb->weight + x4@_FillValue = 1e30 + obsFlag = 1; + sFlag(i) = 0; + aFlag(i) = 1; + else + if (vars(i) .eq. "VWSA") then + if (isvar("x4")) then + delete(x4) + end if + x4 = ptr_alb->VWSA + cases(3) = "MODIS (2001-2003)" + l4 = ptr_alb->LANDFRAC + a4 = ptr_alb->weight + x4@_FillValue = 1e30 + obsFlag = 1; + sFlag(i) = 0; + aFlag(i) = 1; + else + if (vars(i) .eq. 
"NWSA") then + if (isvar("x4")) then + delete(x4) + end if + x4 = ptr_alb->NWSA + cases(3) = "MODIS (2001-2003)" + l4 = ptr_alb->LANDFRAC + a4 = ptr_alb->weight + x4@_FillValue = 1e30 + obsFlag = 1; + sFlag(i) = 0; + aFlag(i) = 1; + else + if (vars(i) .eq. "ASA") then ; read ASA from different modis file (ptr_asa) + if (isvar("x4")) then + delete(x4) + end if + x4 = ptr_asa->BRDALB ; broadband albedo + cases(3) = "MODIS (2001-2003)" + l4 = ptr_alb->LANDFRAC ; use LANDFRAC and weight from ptr_alb file. + a4 = ptr_alb->weight + x4@_FillValue = 1e30 + obsFlag = 1; + sFlag(i) = 0; + aFlag(i) = 1; + else + if (vars(i) .eq. "LHEAT") then + x5 = ptr_lhf->LHF + l5 = ptr_lhf->datamask + a5 = ptr_lhf->area + obsFlag = 1 + cases(1) = "Willmott-Matsuura(T,P),GRDC(R),USAF/ETAC(S),FLUXNET(LHF,GPP)" + sFlag(i) = 0 + aFlag(i) = 0 + else + if ((vars(i) .eq. "FPSN" .and. cn .eq. 0) .or. vars(i) .eq. "GPP") then + x5 = ptr_gpp->GPP + l5 = ptr_gpp->datamask + a5 = ptr_gpp->area + obsFlag = 1 + cases(1) = "Willmott-Matsuura(T,P),GRDC(R),USAF/ETAC(S),FLUXNET(LHF,GPP)" + sFlag(i) = 0 + aFlag(i) = 0 + else + if (vars(i) .eq. "TLAI") then + x7 = ptr_lai->TLAI + l7 = ptr_lai->landmask + a7 = ptr_lai->area + obsFlag = 1 + sFlag(i) = 0 + else + obsFlag = 0 + sFlag(i) = 0 + aFlag(i) = 0 + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if ; paleo + + if (paleo .eq. "True") then + obsFlag = 0 + end if + + if (vcheck1(i) .eq. True) then + ; catch NaNs + x1 = catchNaN(x1) + + ; flip variable to match regions definitions (-180-180) + if (min(x1&lon) .ge. 0) then + x1 = lonFlip(x1) + end if + + x1 = scale_var(x1,scale_type(i),info) + + if (obsFlag .eq. 1) then ; process variables with observed data + if (isvar("x2")) then + x2 = catchNaN(x2) + x2!0 = "mons" + x2&mons = mm + x2!1 = "lat" + x2!2 = "lon" + x2&lat = lat2 + x2&lon = lon2 + if (min(x2&lon) .ge. 
0) then + x2 = lonFlip(x2) + end if + a2!0 = "lat" + a2!1 = "lon" + l2!0 = "lat" + l2!1 = "lon" + a2&lat = lat2 + a2&lon = lon2 + l2&lat = lat2 + l2&lon = lon2 + if (min(a2&lon) .ge. 0) then + a2 = lonFlip(a2) + end if + if (min(l2&lon) .ge. 0) then + l2 = lonFlip(l2) + end if + array2(i,:,:,:) = x2 ; Wrapper fcn that copies coord Vars + landmask2(i,:,:) = l2 + area2(i,:,:) = a2 + delete(l2) + delete(a2) + delete(x2) + end if + + if (isvar("x3") .and. sFlag(i) .eq. 1) then + x3 = catchNaN(x3) + x3!0 = "mons" + x3&mons = mm + x3!1 = "lat" + x3!2 = "lon" + x3&lat = lat3 + x3&lon = lon3 + if (min(x3&lon) .ge. 0) then + x3 = lonFlip(x3) + end if + a3!0 = "lat" + a3!1 = "lon" + l3!0 = "lat" + l3!1 = "lon" + lf!0 = "lat" + lf!1 = "lon" + a3&lat = lat3 + a3&lon = lon3 + l3&lat = lat3 + l3&lon = lon3 + lf&lat = lat3 + lf&lon = lon3 + if (min(a3&lon) .ge. 0) then + a3 = lonFlip(a3) + end if + if (min(l3&lon) .ge. 0) then + l3 = lonFlip(l3) + end if + if (min(lf&lon) .ge. 0) then + lf = lonFlip(lf) + end if + + ; mask out fractional grid cells. + l31d = ndtooned(l3) + lf1d = ndtooned(lf) + l31d = mask(l31d, lf1d .ne. 1, False) + l3 = onedtond(l31d,(/nlat3,nlon3/)) + + array3(i,:,:,:) = x3 ; Wrapper fcn that copies coord Vars + landmask3(i,:,:) = l3 + area3(i,:,:) = a3 + delete(l3) + delete(a3) + delete(lf) + end if + + if (isvar("x4") .and. aFlag(i) .eq. 1) then + x4 = catchNaN(x4) + x4!0 = "mons" + x4&mons = mm + x4!1 = "lat" + x4!2 = "lon" + x4&lat = lat4 + x4&lon = lon4 + if (min(x4&lon) .ge. 0) then + x4 = lonFlip(x4) + end if + a4!0 = "lat" + a4!1 = "lon" + l4!0 = "lat" + l4!1 = "lon" + a4&lat = lat4 + a4&lon = lon4 + l4&lat = lat4 + l4&lon = lon4 + if (min(a4&lon) .ge. 0) then + a4 = lonFlip(a4) + end if + if (min(l4&lon) .ge. 
0) then + l4 = lonFlip(l4) + end if + array4(i,:,:,:) = x4 ; Wrapper fcn that copies coord Vars + landmask4(i,:,:) = l4 + area4(i,:,:) = a4 + delete(l4) + delete(a4) + delete(x4) + end if + + if (isvar("x5")) then + x5 = catchNaN(x5) + x5!0 = "mons" + x5&mons = mm + x5!1 = "lat" + x5!2 = "lon" + x5&lat = lat5 + x5&lon = lon5 + if (min(x5&lon) .ge. 0) then + x5 = lonFlip(x5) + end if + a5!0 = "lat" + a5!1 = "lon" + l5!0 = "lat" + l5!1 = "lon" + a5&lat = lat5 + a5&lon = lon5 + l5&lat = lat5 + l5&lon = lon5 + if (min(a5&lon) .ge. 0) then + a5 = lonFlip(a5) + end if + if (min(l5&lon) .ge. 0) then + l5 = lonFlip(l5) + end if + array5(i,:,:,:) = x5 ; Wrapper fcn that copies coord Vars + landmask5(i,:,:) = l5 + area5(i,:,:) = a5 + delete(l5) + delete(a5) + delete(x5) + end if + + if (isvar("x7")) then + x7 = catchNaN(x7) + x7!0 = "mons" + x7&mons = mm + x7!1 = "lat" + x7&lat = lat7 + x7!2 = "lon" + x7&lon = lon7 + a7!0 = "lat" + a7!1 = "lon" + l7!0 = "lat" + l7!1 = "lon" + a7&lat = lat7 + a7&lon = lon7 + l7&lat = lat7 + l7&lon = lon7 + + if (min(x7&lon) .ge. 0) then + x7 = lonFlip(x7) + end if + if (min(a7&lon) .ge. 0) then + a7 = lonFlip(a7) + end if + if (min(l7&lon) .ge. 0) then + l7 = lonFlip(l7) + end if + + array7(i,:,:,:) = x7 ; Wrapper fcn that copies coord Vars + landmask7(i,:,:) = l7 + area7(i,:,:) = a7 + delete(l7) + delete(a7) + delete(x7) + end if + + end if + + array1(i,:,:,:) = x1 ; time, lat, lon + units(i) = x1@units + long_name(i) = x1@long_name + delete(x1) + + end if + if (paleo .ne. "True") then + if (snowFlag .eq. 1) then ; run SNOWDP twice to see both obs datasets + i = i-1 + end if + end if + + end do ; end variable loop + print ("End variable loop") + + +;********************************** +; now plot vars by region +;********************************** +if(plot_check.eq.True)then + if (paleo .eq. "True") then + startRegion=nreg-3 ; N. and S. 
Hemisphere Land, global + else + startRegion=0 + end if + do region=startRegion,nreg-1 + +; open ps file for plotting + wks = gsn_open_wks(plot_type,wkdir + "set12_" + plot_name + "_" + ps_id(region)) + + pres = True ; panel only resources +; create common legend + pres@txString = ptitstr(region)+boxstr(region) ; common title + +; each regional plot contains all variables, so loop over vars +; moistEnergyFlx plot draws three variabls on one plot. Other plots have only 1 variable per plot. + if (p .eq. 5) then + data = new((/nvars,nmons/),"double") + labels = new(3,"string") + do i = 0,nvars-1 + + ; process model results + compare = "model" + if(vcheck1(i) .eq. True) then + x1 = array1(i,:,:,:) + if (vars(i) .eq. "RNET") then + x1 = (x1 / 2.501e6) * secondsPerDay + end if + var_plot1 = regional_values(x1,region,area1,fland1,scale_type(i),\ + regions_file,time1,nmons,compare) + data(i,:) = var_plot1 + + res@tiYAxisString = "mm/d" + labels(i) = titstr(i) + + delete(var_plot1) + delete(x1) + end if + end do + res@gsnLeftString = cases(0) + plot(0) = gsn_csm_xy(wks,month,data,res) + delete(res@gsnLeftString) + delete(data) + else + do i = 0,nvars-1 + if (vcheck1(i) .eq. True) then + +; snowdp plot draws 2 obs + 1 variable on snowdp plot. Other plots have only 1 variable per plot. + if (vars(i) .eq. "SNOWDP") then + + if (paleo .eq. "True") then ; paleo runs, no obs + data = new((/1,nmons/),"double") + else + data = new((/3,nmons/),"double") + end if + res@xyDashPatterns = (/0.,16.,16./) ; solid, dashed + res@xyLineColors = (/"red","blue","black"/) ; model,FOSTER,CMC + + ; process model results + compare = "model" + if (isvar("x1")) then + delete(x1) + end if + x1 = array1(i,:,:,:) + var_plot1 = regional_values(x1,region,area1,fland1,scale_type(i),\ + regions_file,time1,nmons,compare) + data(0,:) = var_plot1 + delete(var_plot1) + delete(x1) + + if (paleo .eq. 
"False") then ; paleo runs, no obs + ; process 1st set of obs (FOSTER = dashed line) + compare = "obs" + if (isvar("x2")) then + delete(x2) + end if + x2 = array2(i,:,:,:) + var_plot2 = regional_values(x2,region,area2(i,:,:),landmask2(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + data(1,:) = var_plot2 + delete(var_plot2) + delete(x2) + + ; 2nd SNOWDP obs (CMC = solid line) + if (isvar("x3")) then + delete(x3) + end if + x3 = array3(i,:,:,:) + var_plot2 = regional_values(x3,region,area3(i,:,:),landmask3(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + data(2,:) = var_plot2 + delete(var_plot2) + delete(x3) + + end if ; paleo + res@tiYAxisString = "mm/d" + res@gsnCenterString = titstr(i) + + plot(i) = gsn_csm_xy(wks,month,data,res) + delete(data) + else + if (vars(i) .eq. "FSNO") then ; NOAA-AVHRR (black) + delete(res@xyLineColors) + res@xyLineColors = (/"red","black","black"/) + end if + if (vars(i) .eq. "H2OSNO") then ; CMC (black) + delete(res@xyLineColors) + res@xyLineColors = (/"red","black","blue"/) + end if + if (aFlag(i) .eq. 0) then + delete(res@xyLineColors) + res@xyLineColors = (/"red","black","black"/) + end if + + ; process model + if (isvar("x1")) then + delete(x1) + end if + x1 = array1(i,:,:,:) + compare = "model" + if (vars(i) .eq. "ALBEDO" .or. aFlag(i) .eq. 0) then ; any albedo variable + var_plot1 = regional_albedo(x1,region,area1,fland1,scale_type(i),\ + regions_file,nmons,compare) + else + var_plot1 = regional_values(x1,region,area1,fland1,scale_type(i),\ + regions_file,time1,nmons,compare) + end if + delete(x1) + + ; process observations, if available + if (obsFlag .eq. 1) then ; process variables with observed data + data = new((/2,nmons/),"double") + compare = "obs" + if (sFlag(i) .eq. 
1) then ; processing snow vars at 1x1 degree + if (isvar("x3")) then + delete(x3) + end if + x3 = array3(i,:,:,:) + var_plot2 = regional_values(x3,region,area3(i,:,:),landmask3(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x3) + else + if (aFlag(i) .eq. 1) then ; processing albedo variables (T42) + if (isvar("x4")) then + delete(x4) + end if + x4 = array4(i,:,:,:) + var_plot2 = regional_albedo(x4,region,area4(i,:,:),landmask4(i,:,:),scale_type(i),\ + regions_file,nmons,compare) + delete(x4) + else + if (isvar("x2")) then + delete(x2) + end if + x2 = array2(i,:,:,:) + var_plot2 = regional_values(x2,region,area2(i,:,:),landmask2(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x2) + if (vars(i) .eq. "LHEAT") then + if (isvar("x5")) then + delete(x5) + end if + x5 = array5(i,:,:,:) + var_plot2 = regional_values(x5,region,area5(i,:,:),landmask5(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x5) + else + if ((vars(i) .eq. "FPSN" .and. cn .eq. 0) .or. vars(i) .eq. "GPP") then + if (isvar("x5")) then + delete(x5) + end if + x5 = array5(i,:,:,:) + var_plot2 = regional_values(x5,region,area5(i,:,:),landmask5(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x5) + else + if (vars(i) .eq. "TLAI") then + if (isvar("x7")) then + delete(x7) + end if + x7 = array7(i,:,:,:) + var_plot2 = regional_values(x7,region,area7(i,:,:),landmask7(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x7) + end if + end if + end if + end if + end if + data(1,:) = var_plot2 + else + data = new((/1,nmons/),"double") + end if + + data(0,:) = var_plot1 + + res@tiYAxisString = units(i) + res@gsnCenterString = titstr(i) + + ; now stuff it into a graphics array variable for plotting after all variables are done for + ; this region. 
+ plot(i) = gsn_csm_xy(wks,month,data,res) + + delete(data) + delete(var_plot1) + delete(res@gsnCenterString) + end if ; end SNOWDP + end if + end do ; end variable loop + end if ; end plot 5 fork + +; and plot it already using gsn_panel to print all plots onto it from plot array + + +;************************************************* +; gsnpanel takes this plot array and massages it and puts it onto page for us. +;************************************************* + + if (isdefined("lgres")) + delete(lgres) + end if + lgres = True ; allow legend resources + lgres@lgLineColors = res@xyLineColors ; set these equal to those + lgres@lgDashIndexes = res@xyDashPatterns ; ditto + lgres@lgPerimOn = False + lgres@lgLabelFontHeightF = 0.015 ; increase font height + lgres@lgLineLabelStrings = (/"","",""/) ; no labels in lines + lgres@lgAutoManage = False ; must have to adjust fonts + lgres@vpWidthF = 0.45 + lgres@vpHeightF = 0.10 + + pres@gsnFrame = False ; don't advance yet so add legend + pres@gsnMaximize = True ; make as large as possible + pres@gsnPaperOrientation = "Portrait" + pres@gsnPanelBottom = 0.15 ; save space for legend + pres@gsnPanelRowSpec = True + pres@gsnPanelCenter = True + + if (p .eq. 0) then ; landf (6 vars) + cases(1) = "Willmott-Matsuura(T,P),GRDC(R),USAF/ETAC(S),FLUXNET(LHF,GPP),MODIS(LAI)" + cases(2) = "CMC 1980-1996" + pres@gsnPanelCenter = False + pres@gsnPanelYWhiteSpacePercent = 4 + pres@gsnPanelXWhiteSpacePercent = 4 + gsn_panel(wks,plot,(/2,2,2,2/),pres) ; draw panel plot + lgres@vpWidthF = 0.80 + lgres@lgLineColors = (/"red","blue","black"/) ; set these equal to those + lgres@lgDashIndexes = res@xyDashPatterns ; ditto + gsn_legend_ndc(wks,3,cases(0:2),0.15,0.15,lgres) ; draw common legend MAY HAVE TO ADJUST Y + end if + if (p .eq. 
1) then ; radf (7 vars) + pres@gsnPanelCenter = False + gsn_panel(wks,plot,(/2,2,2,1/),pres) + legend = create "Legend" legendClass wks + "vpXF" : 0.51 + "vpYF" : 0.33 + "vpWidthF" : 0.25 + "vpHeightF" : 0.125 + "lgPerimOn" : False + "lgItemCount" : 2 + "lgLabelsOn" : True + "lgLineLabelsOn" : False + "lgLabelStrings" : cases(0:1) + "lgDashIndexes" : (/0,1/) + "lgLineColors" : (/"red","blue"/) + "lgMonoLineLabelFontColor" : True + "lgAutoManage" : False + "lgLabelFontHeightF" : 0.0125 + "lgLabelPosition" : "Center" + "lgLabelAlignment" : "AboveItems" + end create + draw(legend) + delete(legend) + end if + if (p .eq. 2) then ; turbf (9) + pres@gsnPanelCenter = False + gsn_panel(wks,plot,(/2,2,2,2,2/),pres) + legend = create "Legend" legendClass wks + "vpXF" : 0.20 + "vpYF" : 0.16 + "vpWidthF" : 0.20 + "vpHeightF" : 0.1 + "lgPerimOn" : False + "lgItemCount" : 2 + "lgLabelsOn" : True + "lgLineLabelsOn" : False + "lgLabelStrings" : cases(0:1) + "lgDashIndexes" : (/0,1/) + "lgLineColors" : (/"red","blue"/) + "lgMonoLineLabelFontColor" : True + "lgAutoManage" : False + "lgLabelFontHeightF" : 0.015 + "lgLabelPosition" : "Center" + "lgLabelAlignment" : "AboveItems" + end create + draw(legend) + delete(legend) + end if + if (p .eq. 3 ) then ; 3 and 4 are cn and fire (10 vars each) + cases(1) = "No Obs" + pres@gsnPanelCenter = False + lgres@lgLabelFontHeightF = 0.025 + if (clamp .eq. 1 .or. casa .eq. 
1) then + gsn_panel(wks,plot,(/2,2,2/),pres) ; draw panel plot + gsn_legend_ndc(wks,1,cases(0:1),0.275,0.13,lgres) ; draw common legend + else + gsn_panel(wks,plot,(/2,2,2,2,2/),pres) ; draw panel plot +; gsn_legend_ndc(wks,1,cases(0:1),0.275,0.10,lgres) ; draw common legend + legend = create "Legend" legendClass wks + "vpXF" : 0.30 + "vpYF" : 0.16 + "vpWidthF" : 0.20 + "vpHeightF" : 0.1 + "lgPerimOn" : False + "lgItemCount" : 2 + "lgLabelsOn" : True + "lgLineLabelsOn" : False + "lgLabelStrings" : cases(0:1) + "lgDashIndexes" : (/0,0/) + "lgLineColors" : (/"red","blue"/) + "lgMonoLineLabelFontColor" : True + "lgAutoManage" : False + "lgLabelFontHeightF" : 0.015 + "lgLabelPosition" : "Center" + "lgLabelAlignment" : "AboveItems" + end create + draw(legend) + delete(legend) + end if + lgres@lgLabelFontHeightF = 0.015 + end if + if (p .eq. 4) then ; 3 and 4 are cn and fire (4 vars each) + cases(1) = "No Obs" + pres@gsnPanelCenter = False + gsn_panel(wks,plot,(/2,2,2/),pres) ; draw panel plot + gsn_legend_ndc(wks,2,cases(0:1),0.35,0.10,lgres) ; draw common legend + end if + if (p .eq. 5) then + pres@gsnPanelCenter = True + gsn_panel(wks,plot,(/1,1/),pres) ; draw panel plot + lgres@lgLabelFontHeightF = 0.015 + gsn_legend_ndc(wks,3,labels,0.275,0.52,lgres); draw common legend + delete(labels) + end if + if (p .eq. 6) then + cases(1) = "No Obs" + pres@gsnPanelCenter = True + gsn_panel(wks,plot,(/2,2,1/),pres) ; draw panel plot + lgres@lgLabelFontHeightF = 0.028 + gsn_legend_ndc(wks,2,cases,0.275,0.17,lgres) ; draw common legend + lgres@lgLabelFontHeightF = 0.015 + end if + if (p .eq. 
7) then ; snow + cases(1) = "USAF" + cases(2) = "CMC/NOAA-AVHRR" + delete(lgres) + lgres = True ; allow legend resources + lgres@lgPerimOn = False + lgres@lgLabelFontHeightF = 0.015 ; increase font height + lgres@lgLineLabelStrings = (/"","",""/) ; no labels in lines + lgres@lgAutoManage = False ; must have to adjust fonts + lgres@vpWidthF = 0.55 + lgres@vpHeightF = 0.15 + lgres@lgLineColors = (/"red","blue","black"/) + lgres@xyLineThicknesses = (/2.,2.,2./) + lgres@lgDashIndexes = (/0.,16.,16./) ; solid, dashed + lgres@lgLineLabelsOn = False + gsn_legend_ndc(wks,3,cases(0:3),0.225,0.23,lgres) ; draw common legend MAY HAVE TO ADJUST Y + gsn_panel(wks,plot,(/2,2/),pres) ; draw panel plot + end if + if (p .eq. 8) then ; snow + cases(1) = "MODIS 2001-2003" + delete(lgres) + lgres = True ; allow legend resources + lgres@lgPerimOn = False + lgres@lgLabelFontHeightF = 0.015 ; increase font height + lgres@lgLineLabelStrings = (/"","",""/) ; no labels in lines + lgres@lgAutoManage = False ; must have to adjust fonts + lgres@vpWidthF = 0.55 + lgres@vpHeightF = 0.15 + lgres@lgLineColors = res@xyLineColors ; set these equal to those + lgres@lgDashIndexes = res@xyDashPatterns ; ditto + lgres@lgLineLabelsOn = False + gsn_legend_ndc(wks,2,cases(0:1),0.225,0.15,lgres) ; draw common legend MAY HAVE TO ADJUST Y + gsn_panel(wks,plot,(/2,2,1/),pres) ; draw panel plot + end if + frame(wks) + + end do ; end of region loop +end if ; end of plot_check loop + + delete(vars) + delete(vcheck1) + if (isvar("array1")) then + delete(array1) + end if + if (isvar("array2")) then + delete(array2) + end if + if (isvar("array3")) then + delete(array3) + end if + if (isvar("array4")) then + delete(array4) + end if + if (isvar("array5")) then + delete(array5) + end if + if (isvar("array7")) then + delete(array7) + end if + delete(scale_type) + delete(plot) + delete(units) + delete(long_name) + delete(mm) + delete(vv) + delete(titstr) + delete(sFlag) + delete(aFlag) + + end do ; end plot loop + + 
delete(landmask2) + delete(landmask3) + delete(landmask4) + delete(landmask5) + delete(landmask7) + delete(area2) + delete(area3) + delete(area4) + delete(area5) + delete(area7) + +;---------------------------------------------- +; Create global maps delineating regions (on model grid) +;---------------------------------------------- + +;---------------------------------------------- +; common plot resources +;---------------------------------------------- + if(isvar("res")) then + delete(res) + end if + res = True + res@mpProjection = "Robinson" + res@mpPerimOn = False + res@mpGridLineColor = "transparent" + res@cnFillOn = True + res@cnLineLabelsOn = False + res@lbLabelBarOn = False + res@cnLinesOn = False + res@mpGridLonSpacingF = 180.0 ; Set lon grid spacing + res@mpGridLatSpacingF = 90.0 ; Set lat grid spacing + res@mpFillOn = False + res@mpOutlineOn = True + res@mpGridAndLimbOn = True + res@mpGridLineDashPattern = 2 ; Set grid line dash pattern (dash) + res@mpGridLineThicknessF = 0.5 ; Set grid line thickness + sstr=(/" "/) + res@gsnMaximize = True + res@cnLevelSelectionMode = "ExplicitLevels" + wks = gsn_open_wks(plot_type,wkdir+"set12_reg_all") + cmap = (/"(/1.00, 1.00, 1.00/)", "(/0.00, 0.00, 0.00/)", \ + "(/1.00, 1.00, 1.00/)", "Blue" /) + gsn_define_colormap(wks, cmap ) + res@gsnLeftString = " " + res@gsnRightString = " " + res@cnLevels = (/0./) + res@cnRasterModeOn = True + + flag1 = area1 + flag1 = -0.1 + + + if (paleo .eq. "True") then + startRegion=nreg-3 ; N. + S. Hemisphere land + global + else + startRegion=0 + end if + + if (paleo .eq. "True") then + landfrac = in1->landfrac + oro = new((/nlat1,nlon1/),integer) + oro = 1 + oro@_FillValue = 0 + oro = where(landfrac .gt. 
0.0, 1,0) +; fname = wkdir+prefix +; fname = wkdir+"basename" +; fname = wkdir+cases(0) + fname = wkdir+getenv(name1) + paleo_outline(oro,lat1,lon1,1, fname) + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + do ar = startRegion, nreg-1 ; don't include northern and southern hemisphere and global + flag1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) = 0.1*fland1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) - 1.e-36 + end do + else + do ar = 0, nreg-4 ; don't include northern and southern hemisphere and global + flag1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) = 0.1*fland1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) - 1.e-36 + end do + end if + + res@gsnCenterString = "Model Data Regions - All" + plot = gsn_csm_contour_map(wks, flag1, res) + + do ar = startRegion, nreg-1 +;---------------------------------------------- +; Open file for plots +;---------------------------------------------- + wks = gsn_open_wks(plot_type,wkdir+"set12_reg_"+ps_id(ar)) + cmap = (/"(/1.00, 1.00, 1.00/)", "(/0.00, 0.00, 0.00/)", \ + "(/1.00, 1.00, 1.00/)", "Blue" /) + gsn_define_colormap(wks, cmap ) + flag1 = -0.1 + flag1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) = 0.1*fland1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) - 1.e-36 + res@gsnCenterString = "Model Data Regions - " + ptitstr(ar) + boxstr(ar) + plot = gsn_csm_contour_map(wks, flag1, res) + end do + + print ("=========================================") + print ("Finish Time: "+systemfunc("date") ) + print ("=========================================") + + ; remove error file if program completes successfully. 
+ filo = wkdir +"/set12_error_file" + system("/bin/rm -f " + filo) + +end diff --git a/lnd_diag/model1-model2/set_10.ncl b/lnd_diag/model1-model2/set_10.ncl new file mode 100755 index 00000000..72ed10dc --- /dev/null +++ b/lnd_diag/model1-model2/set_10.ncl @@ -0,0 +1,1463 @@ +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$DIAG_SHARED/$VAR_MASTER" +load "$DIAG_SHARED/lnd_func.ncl" +load "$DIAG_SHARED/find_var_with_alts.ncl" +;************************************************ +begin + print ("=========================================") + print ("Starting: set_10.ncl") + print ("Start Time: "+systemfunc("date") ) + print ("=========================================") + ptmpdir1 = getenv("PTMPDIR_1") + ptmpdir2 = getenv("PTMPDIR_2") + wkdir = getenv("WKDIR") + obsdata = getenv("OBS_DATA") + obs_res = getenv("OBS_RES") + paleo = getenv("paleo") + plotObs = getenv("plotObs") + useCommon1 = getenv("UseCommonName_1") + useCommon2 = getenv("UseCommonName_2") + raster = getenv("raster") + plot_type = getenv("PLOTTYPE") + land_mask = getenv("land_mask1") + user_defined = getenv("expContours") + cn = getenv("CN") + colormap = getenv("colormap") + projection=getenv("projection") + +;; print((/"PTMPDIR_1: "+ptmpdir1/)) +;; print((/"PTMPDIR_2: "+ptmpdir2/)) +;; print((/"wkdir: "+wkdir/)) + + flandmask = stringtofloat(land_mask) + + nyrs1 = stringtointeger(getenv("clim_num_yrs_1")) + nyrs2 = stringtointeger(getenv("clim_num_yrs_2")) + +;************************************************************************** +; Cut off top and bottom percent to tighten contour intervals. 12/06 nanr +;************************************************************************** + pCut = 5 + pLow = 0.05 + pHigh = 0.95 + +; the set lists contains two columns of information. This comes into +; NCL as one long string that we need to parse out. 
+ tmp = stringtocharacter(asciiread(wkdir+"/master_set10.txt",-1,"string")) + nvar = dimsizes(tmp(:,0)) + scale_type = new(nvar,"string") + vars = new(nvar,"string") + c13Flag = new(nvar,"string") + dynContour = new(nvar,"string") + do i=0,nvar-1 + scale_type(i) = charactertostring(tmp(i,0:12)) + dynContour(i) = charactertostring(tmp(i,13)) + vars(i) = charactertostring(tmp(i,16:)) + c13Flag(i) = charactertostring(tmp(i,16:19)) ; derived variable prefix + end do + +;************************************************* +; get case names and create filenames to read in +;************************************************* + sig_lvl = stringtofloat(getenv("sig_lvl") ) + zoom = stringtointeger(getenv("reg_contour")); # (1 = SUB, 0 = GLOBAL) + if (zoom.eq.1)then + min_lat = stringtofloat(getenv("min_lat")) + max_lat = stringtofloat(getenv("max_lat")) + min_lon = stringtofloat(getenv("min_lon")) + max_lon = stringtofloat(getenv("max_lon")) + end if + seasons = (/"DJF","JJA","MAM","SON","ANN"/) +;************************************************* +; common plot resources +;************************************************* + pres = True ; panel only resources + +; read in case strings + cases = new(3,string) + prefix = new(2,string) + do m = 0,1 + if (m .eq. 0) then + useFlag = useCommon1 + end if + if (m .eq. 1) then + useFlag = useCommon2 + end if + if (useFlag .eq. 
"True") then + name1 = "commonName_"+(m+1) + else + name1 = "caseid_"+(m+1) + end if + name2 = "prefix_"+(m+1) + cases(m) = getenv(name1) + prefix(m) = getenv(name2) + end do + +; loop over seasons + + do n = 0,dimsizes(seasons)-1 + print("Processing season " + seasons(n)) + in1 = addfile(ptmpdir1+"/"+prefix(0)+"_"+seasons(n)+"_climo.nc","r") ; used for first 3 plots + in2 = addfile(ptmpdir2+"/"+prefix(1)+"_"+seasons(n)+"_climo.nc","r") ; used for first 3 plots + in11 = addfile(ptmpdir1+"/"+prefix(0)+"_"+seasons(n)+"_means.nc","r") + in22 = addfile(ptmpdir2+"/"+prefix(1)+"_"+seasons(n)+"_means.nc","r") + ; get degrees of freedom for t test + if (isatt(in11,"num_yrs_averaged")) then + dof_1 = in11@num_yrs_averaged + else + dof_1 = getfilevardimsizes(in11, "time") + end if + if (isatt(in22,"num_yrs_averaged")) then + dof_2 = in22@num_yrs_averaged + else + dof_2 = getfilevardimsizes(in22, "time") + end if +; open observation files + + ptr_racmo = addfile(obsdata+"/RACMO2_GR_"+seasons(n)+"_climo.nc","r") ; RACMO2.3 + +; extract years averaged for plotting + if (isatt(in1,"yrs_averaged")) then + yrs_ave1 = in1@yrs_averaged + end if + if (isatt(in2,"yrs_averaged")) then + yrs_ave2 = in2@yrs_averaged + end if + +; check to make sure levels are the same for both model cases + nlev1 = 0 + nlev2 = 0 + if (isfilevar(in1,"levsoi")) then + nlev1 = getfilevardimsizes(in1,"levsoi") + end if + if (isfilevar(in2,"levsoi")) then + nlev2 = getfilevardimsizes(in2,"levsoi") + end if + if (isfilevar(in1,"levgrnd")) then + nlev1 = getfilevardimsizes(in1,"levgrnd") + end if + if (isfilevar(in2,"levgrnd")) then + nlev2 = getfilevardimsizes(in2,"levgrnd") + end if + if (nlev1 .eq. 0 .or. nlev2 .eq. 
0) + print("FATAL ERROR Set2: nlev1 = 0") + exit + end if + + if (isfilevar(in1,"TLAKE") ) then + if (isfilevar(in1,"levlak") ) then + nlevlak1 = getfilevardimsizes(in1,"levlak") + end if + end if + if (isfilevar(in2,"TLAKE") ) then + if (isfilevar(in2,"levlak") ) then + nlevlak2 = getfilevardimsizes(in2,"levlak") + end if + end if + + lon1 = in1->lon + lon2 = in2->lon + lat1 = in1->lat + lat2 = in2->lat + nlon1 = dimsizes(lon1) + nlon2 = dimsizes(lon2) + nlat1 = dimsizes(lat1) + nlat2 = dimsizes(lat2) + time1 = in1->time + time2 = in2->time + +; extract landfrac if paleo file + if (paleo .eq. "True") then + flandmask = 0 + landfrac = in1->landfrac(:,:) + oro = new((/nlat1,nlon1/),integer) + oro = 1 + oro@_FillValue = 0 + oro = where(landfrac .gt. 0.0, 1,0) +; fname = wkdir+prefix(0) +; fname = wkdir+"basename" + fname = wkdir+getenv(name1) + paleo_outline(oro,lat1,lon1,flandmask, fname ) + res = True + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; truncate soil layers if needed + if (nlev1 .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix(0)+" nlev: " + nlev1 +"]") + nlev1 = 10 + end if + if (nlev2 .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix(1)+" nlev: " + nlev2 +"]") + nlev2 = 10 + end if + +; loop over variables + + snowFlag = 0 + do i=0,dimsizes(vars)-1 + + if (paleo .ne. "True") then + if (snowFlag .eq. 1) then ; run SNOWDP twice to see both obs datasets + i = i-1 + end if + if (vars(i) .eq. 
"SNOWDP") then + snowFlag = snowFlag + 1 + end if + end if + print("Processing variable " + vars(i)) + + plot_check_1 = True + plot_check_11 = True + plot_check_2 = True + plot_check_22 = True + + info = var_init(vars(i)) ; read in case 1 variable + filevar = find_var_with_alts(in1, vars(i), info) +; now filevar is the name of the variable in the file (either +; vars(i) or one of its alternate names); if this variable was +; not found in the file, then filevar will be "" + + plot_check = True + if (filevar .ne. "")then + if (isvar("x1")) then + delete(x1) + end if + if (isvar("x11")) then + delete(x11) + end if + if (c13Flag(i) .eq. "C13_") then + x1 = derive_del13C(vars(i),in1, scale_type(i),0,2) + x11 = derive_del13C(vars(i),in11,scale_type(i),0,2) + else + x1 = in1 ->$filevar$ + x11 = in11->$filevar$ + plot_check_1 = True + plot_check_11 = True + delete(filevar) + end if + else + if (info@derivedVariable .eq. True) then + x1 = derive_land(vars(i),in1) + x11 = derive_land(vars(i),in11) + else + print("variable "+ vars(i)+ " is not a defined variable.") + plot_check_1 = False + plot_check_11 = False + continue + end if + end if + + info = var_init(vars(i)) ; read in case 2 variable + filevar = find_var_with_alts(in2, vars(i), info) +; now filevar is the name of the variable in the file (either +; vars(i) or one of its alternate names); if this variable was +; not found in the file, then filevar will be "" + + plot_check = True + if (filevar .ne. "")then + if (isvar("x2")) then + delete(x2) + end if + if (isvar("x22")) then + delete(x22) + end if + if (c13Flag(i) .eq. "C13_") then + x2 = derive_del13C(vars(i),in2, scale_type(i),0,2) + x22 = derive_del13C(vars(i),in22,scale_type(i),0,2) + else + x2 = in2 ->$filevar$ + x22 = in22->$filevar$ + plot_check_2 = True + plot_check_22 = True + delete(filevar) + end if + else + if (info@derivedVariable .eq. 
True) then + x2 = derive_land(vars(i),in2) + x22 = derive_land(vars(i),in22) + else + print("variable "+ vars(i)+ " is not a defined variable.") + plot_check_2 = False + plot_check_22 = False + continue + end if + end if + + obsFlag = 0 ; read in observations (if present) (1 = on; 0 = off) + if (isvar("x3") ) then + delete(x3) + end if + if (paleo .ne. "True") then + if (vars(i) .eq. "ASA" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3*100. ; fraction to % + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "QSOIL" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "U10" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FGR" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FLDS" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FIRE" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3 * -1. ; defined negative in RACMO2.3 + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "RAIN" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "PBOT" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "Q2M" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. 
"QICE_FRZ" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "RH2M" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3 * 100. ; fraction to % + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "QICE_MELT" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FSH" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3 * -1. ; defined neg. in CESM + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "QICE" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "SNOW" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "QSNOMELT" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FSDS" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "FSR" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3 * -1. ; defined as negative in RACMO2.3 + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + else + if (vars(i) .eq. "TSA" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3 - 273.16 ; K to C + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.3" + obsFlag = 1; + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + + end if ; paleo + + if (paleo .eq. "True") then + obsFlag = 0 + end if + + if (obsFlag .eq. 
1) then + nlon3 = dimsizes(lon3) + nlat3 = dimsizes(lat3) + end if + + if (x1@units .eq. "K") then + x1 = x1 - 273.15 ; convert K to C + x2 = x2 - 273.15 ; convert K to C + end if + + ; catch NaNs + if (isvar("x1") ) then + x1 = catchNaN(x1) + end if + if (isvar("x11") ) then + x11 = catchNaN(x11) + end if + if (isvar("x2") ) then + x2 = catchNaN(x2) + end if + if (isvar("x22") ) then + x22 = catchNaN(x22) + end if + + if (all(ismissing(x1)))then + print("variable "+ vars(i)+ " contains all missing values.") + plot_check_1 = False + delete(x1) + end if + if (all(ismissing(x11)))then + print("variable "+ vars(i)+ " not found") + plot_check_11 = False + delete(x11) + end if + if (all(ismissing(x2)))then + print("variable "+ vars(i)+ " not found") + plot_check_2 = False + delete(x2) + end if + if (all(ismissing(x22)))then + print("variable "+ vars(i)+ " not found") + plot_check_22 = False + delete(x22) + end if + +; ---------------------- +; start plotting process if variables exist +; ---------------------- + if (plot_check_1 .eq. True .and. plot_check_11 .eq. True .and. plot_check_2 .eq. True .and. plot_check_22 .eq. True) then + x1 = scale_var(x1, scale_type(i),info) + x11 = scale_var(x11,scale_type(i),info) + x2 = scale_var(x2, scale_type(i),info) + x22 = scale_var(x22,scale_type(i),info) + +; flip longitudes to -180 to 180 if needed + if (min(x2&lon) .ge. 0 .and. min(x1&lon) .lt. 0) then + x2 = lonFlip(x2) + x22 = lonFlip(x22) + end if + if (min(x1&lon) .ge. 0 .and. min(x2&lon) .lt. 0) then + x1 = lonFlip(x1) + x11 = lonFlip(x11) + end if + +; process observations if present. Case 1 as root. +; flip x3 longitudes to -180 to 180 if needed + if (obsFlag .eq. 1) ; (1 = on; 0 = off) + if (min(x1&lon) .lt. 0 .and. min(x3&lon) .ge. 0) then + if (lon3(0) .ge. 0.) then + x3 = lonFlip(x3) + end if + end if + end if + + rank1 = dimsizes(dimsizes(x1)) ; calculate size + rank2 = dimsizes(dimsizes(x2)) + + useCase1 = True + useCase2 = False + if (nlat1 .ne. 
nlat2) then + if (nlat1 .gt. nlat2) then ; interpolate x1 to x2 grid + x1_interp = linint2_Wrap(lon1,lat1,x1,True,lon2,lat2,0) + x2_interp = x2 + x11_interp = linint2_Wrap(lon1,lat1,x11,True,lon2,lat2,0) + x22_interp = x22 + useCase1 = False + useCase2 = True + use_nlat = nlat2 + use_nlon = nlon2 + else + x2_interp = linint2_Wrap(lon2,lat2,x2,True,lon1,lat1,0) + x1_interp = x1 + + x22_interp = linint2_Wrap(lon2,lat2,x22,True,lon1,lat1,0) + x11_interp = x11 + use_nlat = nlat1 + use_nlon = nlon1 + end if + else + use_nlon = nlon1 + use_nlat = nlat1 + x1_interp = x1 + x2_interp = x2 + x11_interp = x11 + x22_interp = x22 + end if + +; interpolate obs to new grid if necessary + if (obsFlag .eq. 1) ; (1 = on; 0 = off) + if (nlat1 .ne. nlat3 .or. nlat2 .ne. nlat3) then + if (useCase1 .eq. True) then + x3_interp = linint2_Wrap(lon3,lat3,x3,True,lon1,lat1,0) ; interpolate x3 to x1 grid + end if + if (useCase2 .eq. True) then + x3_interp = linint2_Wrap(lon3,lat3,x3,True,lon2,lat2,0) ; interpolate x3 to x2 grid + end if + else + x3_interp = x3 + end if +; different plots for observations +; plot = new(9,graphic) + else +; x1_interp = x1 ; fix, per Keith Oleson +; plot = new(4,graphic) + end if ; end observations + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then + plot = new(9,graphic) + else + plot = new(4,graphic) + end if + + if (x1@units .eq. "K") then + useUnits = "C" ; assume same units for x1 and x2 + else + useUnits = x1@units ; assume same units for x1 and x2 + end if +;******************************************************************* +; 3D VARIABLES +;******************************************************************* + if (rank1.eq.3)then + +; calculate time average + var_avg_1 = dim_avg_n_Wrap(x1,0) ; time average + var_avg_2 = dim_avg_n_Wrap(x2,0) ; time average + var_avg_1_intp = dim_avg_n_Wrap(x1_interp,0) ; time average + var_avg_2_intp = dim_avg_n_Wrap(x2_interp,0) ; time average + + if (obsFlag .eq. 
1) then ; (1 = obs; 0 = no obs) + x3_interp = mask(x3_interp, (x1_interp(0,:,:) .eq. x1_interp@_FillValue), False) ; remove non-land points + var_avg_3 = x3_interp + var_avg_3_intp = x3_interp + min3 = min(var_avg_3) + max3 = max(var_avg_3) + delete(x3_interp) + end if + + min1 = min(var_avg_1) + min2 = min(var_avg_2) + max1 = max(var_avg_1) + max2 = max(var_avg_2) + + if (min1 .le. min2) then + if (isvar("min12")) then + delete(min12) + end if + min12 = min1 + else + if (isvar("min12")) then + delete(min12) + end if + min12 = min2 + end if + + if (max1 .ge. max2) then + if (isvar("max12")) then + delete(max12) + end if + max12 = max1 + else + if(isvar("max12")) then + delete(max12) + end if + max12 = max2 + end if + +;************************************************************************** +; Set contour Levels: Dynamic vs pre-defined contours +;************************************************************************** + +; dynContour: 0=dynamic;1=explicit + if (info@cn_Explicit .eq. True .and. dynContour(i) .eq. 1 .and. user_defined .eq. 1) then + expFlag = True + else + expFlag = False + end if + + res = True + res = set10ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + if (expFlag .eq. True) then ; explicitly defined contours. + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_Levels ; contour levels + else + if (c13Flag(i) .eq. "C13_") then + useMin12 = -40 ; hardcode boundaries to expectations + useMax12 = 0 ; hardcode boundaries to expectations + else + x = ndtooned(var_avg_1) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin12 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax12 = x(iHigh) + delete(x) + end if + maxLevel = 9 + if (vars(i) .eq. "SNOWDP") then + useMin12 = 0. + useMax12 = 1. + end if + if (vars(i) .eq. "H2OSNO") then + useMin12 = 0. 
+ useMax12 = 500. + end if + if (vars(i) .eq. "SNOWICE") then + useMin12 = 0. + useMax12 = 500. + end if + if (vars(i) .eq. "TWS") then + useMin12 = 4000. + useMax12 = 10000. + end if + mnmxint = nice_mnmxintvl( useMin12, useMax12, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; explicit contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + + delete(useMin12) + delete(useMax12) + end if + +; open postscript file and choose colormap + if (isvar("wks")) then + delete(wks) + end if + if (vars(i) .eq. "SNOWDP") then + if (snowFlag .eq. 1) then +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) +;; print((/"Fullname: "+wkdir + "set10_" + seasons(n)+"_"+vars(i)+"_FOSTERDAVY"/)) + fullname = wkdir+"set10_"+seasons(n)+"_"+vars(i)+"_FOSTERDAVY" +;; print((/"Fullname: "+fullname/)) +;; wks = gsn_open_wks(plot_type,wkdir + "set10_" + seasons(n)+"_"+vars(i)+"_FOSTERDAVY") + wks = gsn_open_wks(plot_type,fullname) + else +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) +;; print((/"Fullname: "+wkdir + "set10_" + seasons(n)+"_"+vars(i)+"_CMC"/)) + fullname = wkdir + "set10_" + seasons(n)+"_"+vars(i)+"_CMC" +;; print((/"Fullname: "+fullname/)) +;; wks = gsn_open_wks(plot_type,wkdir + "set10_" + seasons(n)+"_"+vars(i)+"_CMC") + wks = gsn_open_wks(plot_type,fullname) + end if + else +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) + fullname = wkdir + "set10_" + seasons(n)+"_"+vars(i) +;; print((/"Fullname: "+fullname/)) +;; print((/"Fullname: "+wkdir + "set10_" + seasons(n)+"_"+vars(i)/)) +;; wks = gsn_open_wks(plot_type,wkdir + "set10_" + seasons(n)+"_"+vars(i)) + wks = gsn_open_wks(plot_type,fullname) + end if + if (colormap.eq.0) 
then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap + gsn_define_colormap(wks,cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) + res@cnFillPalette = cmap1(13:240,:) + end if + + res@tiMainString = cases(0) ; set case 1 titles + res@gsnCenterString = "(yrs " + yrs_ave1 + ")" + min1 = min(var_avg_1) + max1 = max(var_avg_1) + + res@gsnLeftString = "" ; vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + delete(min1) + delete(max1) + + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + + ctr = 0 + plot(ctr) = gsn_csm_contour_map(wks,var_avg_1,res) ; case 1 plot + ctr = ctr+1 + +; Note: Don't delete res here - save until after obs are posted. 
+ res@tiMainString = cases(1) ; set case 2 titles + res@gsnCenterString = " (yrs " + yrs_ave2 + ")" + if (vars(i) .eq. "CH4PROD" .or. vars(i) .eq. "CH4_SURF_EBUL_SAT" .or. vars(i) .eq. "CH4_SURF_EBUL_UNSAT" .or. vars(i) .eq. "CPOOL") then + if (isatt(res,"cnLabelBarEndStyle")) then + if (res@cnLabelBarEndStyle.eq."IncludeMinMaxLabels") then + res@cnLabelBarEndStyle = "IncludeOuterBoxes" ; temporarily turn off minmax labels. + end if + end if + else + if (isatt(res,"cnLabelBarEndStyle")) then + res@cnLabelBarEndStyle = "IncludeMinMaxLabels" + end if + end if + plot(ctr) = gsn_csm_contour_map(wks,var_avg_2,res) ; case 2 plot + ctr = ctr+1 + +; Note: Don't delete res here - save until after obs are posted. + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then ; OBS plot (if present) + res = True + res = set10ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + res@tiMainString = cases(2) + res@gsnCenterString = " " + delete(min3) + delete(max3) + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + plot(ctr) = gsn_csm_contour_map(wks,var_avg_3,res) + ctr = ctr+1 + end if + delete(res) ; delete res in time for difference plots. + +; difference plots + if (isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag12.rgb") ; read in colormap (increase color resolution for difference maps) + gsn_define_colormap(wks,cmap) + delete(cmap) + end if + + if (obsFlag .eq. 1 .and. plotObs .eq. 
1) then ; OBS plot (if present) + res = True + res = set10ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + res@cnFillPalette = cmap1(13:240,:) + + +; Case 1 - Obs + diff = var_avg_1_intp ; trick to maintain meta data + check = isdouble(var_avg_3_intp) + if (check .eq. True) then + diff = var_avg_1_intp - doubletofloat(var_avg_3_intp) + else + diff = var_avg_1_intp - var_avg_3_intp + end if + delete(check) + mindiff = min(diff) + maxdiff = max(diff) + res@tiMainString = cases(0) + res@gsnCenterString = "- Observations" +; set contour levels ( use larger contour intervals for Model vs Obs ) + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiffObs ; contour levels + + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(diff) + delete(mindiff) + delete(maxdiff) + ctr = ctr+1 + +; Case 2 - Obs + diff = var_avg_1_intp ; trick to maintain meta data + diff = var_avg_2_intp - doubletofloat(var_avg_3_intp) + res@tiMainString = cases(1) + res@gsnCenterString = "- Observations" + mindiff = min(diff) + maxdiff = max(diff) + + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiffObs ; contour levels + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(res) + delete(diff) + 
delete(mindiff) + delete(maxdiff) + ctr = ctr+2 + end if + +; Case 1 - Case 2 + diff = var_avg_1_intp ; trick to maintain meta data + diff = var_avg_1_intp - var_avg_2_intp + mindiff = min(diff) + maxdiff = max(diff) + +; set contour levels + res = True + res = set10ResMvM(res) + res@cnFillPalette = cmap1(13:240,:) + + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + if (expFlag .eq. False) then + sd = dim_stddev(ndtooned(diff)) + + if (vars(i).eq."PREC") then + useMin1 = -2.*sd ; = mindiff + useMax1 = 2.*sd ; = maxdiff + else + useMin1 = -2.5*sd ; = mindiff + useMax1 = 2.5*sd ; = maxdiff + end if + + if (c13Flag(i) .eq. "C13_") then + useMin1 = -40 ; hardcode boundaries to expectations + useMax1 = 0 ; hardcode boundaries to expectations + end if + if (mindiff.eq.maxdiff) then ; two cases are identical + useMin1 = -6 + useMax1 = 6 + diff = 0. + if (isatt(res,"cnLabelBarEndStyle")) then + if (res@cnLabelBarEndStyle.eq."IncludeMinMaxLabels") then + res@cnLabelBarEndStyle = "IncludeOuterBoxes" ; temporarily turn off minmax labels. + end if + end if + end if + maxLevel = 13 + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete([/useMin1,useMax1/]) + else + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiff ; contour levels + end if + + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 
1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + res@tiMainString = cases(0) + res@gsnCenterString= "- "+cases(1) + + res@gsnLeftString = "" ; vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ; useUnits ; assume same units for x1 and x2 + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(res) + ctr = ctr+1 + delete([/diff,mindiff,maxdiff/]) + + res = True ; T-Test plots + res = set10ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + var_variance_1 = dim_variance_n_Wrap(x11_interp,0) ; time average + var_variance_2 = dim_variance_n_Wrap(x22_interp,0) ; time average + +; set variances < 0 to missing so don't divide by zero + if (any(var_variance_1 .le. 0)) then + var_variance_1 = mask(var_variance_1,(var_variance_1 .gt. 0),True) + end if + if (any(var_variance_2 .le. 0)) then + var_variance_2 = mask(var_variance_2,(var_variance_2 .gt. 
0),True) + end if +; calculate t-test + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then + + pfinal = new((/use_nlat,use_nlon/),"float") + + diff13 = abs(var_avg_1_intp - doubletofloat(var_avg_3_intp)) + diff23 = abs(var_avg_2_intp - doubletofloat(var_avg_3_intp)) + delete(var_avg_3_intp) + copy_VarCoords(prob,diff13) + copy_VarCoords(prob,diff23) + + p1d = ndtooned(prob) + d131d = ndtooned(diff13) + d231d = ndtooned(diff23) + + do c=0,dimsizes(p1d)-1 + if (.not. ismissing(d131d(c)) .and. .not. ismissing(d231d(c) )) then + if (.not. ismissing(p1d(c))) then + if (d131d(c) .gt. d231d(c) ) then ; case 2 is closer to obs + if (p1d(c) .lt. sig_lvl ) then + p1d(c) = 10 ; blue + end if + else ; case 1 is closer to obs + if (p1d(c) .lt. sig_lvl ) then + p1d(c) = 5 ; red + end if + end if + end if + end if + end do + + pfinal = onedtond(p1d,dimsizes(prob)) + copy_VarCoords(prob,pfinal) + + res@gsnCenterString = "" + res@gsnRightString = "" + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.3 + res@tiMainString = cases(0)+" (green)" + res@gsnCenterString = cases(1)+" (red)" + ; res@tiMainString = "Case1+ (green) and Case2+ (red) relative to obs" + res@gsnLeftString = "Model relative to Obs" +; res@gsnSpreadColors = False + res@cnFillMode = "RasterFill" + res@cnLevelSelectionMode = "ExplicitLevels" + ; nanr 12/01/07 + if (isvar("cnLevels")) then + delete(res@cnLevels) + end if + res@cnLevels = (/2,7/) ; user defined sig level to view + res@cnFillColors = (/"white","green","red"/) ; white=NoChange; green=Case1 better; red=Case2 better +; res@cnFillColors = (/0,11,11/) ; only have red squares + + + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + 
res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + + plot(ctr) = gsn_csm_contour_map(wks, pfinal,res) ; will be sig test + delete([/res,pfinal,p1d,d131d,d231d,diff13,diff23/]) + ctr = ctr+1 + end if + + res = True ; t-test plot (3D vars) + res = set10ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + +; res@gsnSpreadColors = False + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = sig_lvl ; user defined sig level to view + res@cnFillColors = (/"red","white"/) ; only have red squares + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.245 + res@tiMainString = "T-Test of two Case means at each grid point" + res@gsnLeftString = "Cells are significant at " + sig_lvl +" level" + res@gsnRightString = "" + res@gsnCenterString = "" + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 
1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + res@tiMainOffsetYF = 0 + + plot(ctr) = gsn_csm_contour_map(wks,prob,res) ; will be sig test + delete(res) + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then ; panel plot + nrows = 3 + ncols = 3 + else + nrows = 2 + ncols = 2 + end if + pres@txString = seasons(n)+" "+vars(i)+" ("+useUnits+")" ; " + gsn_panel(wks,plot,(/nrows,ncols/),pres) ; draw panel plot + else +;************************************************************************* +; 4D VARIABLES +;************************************************************************* +; calculate 4D average + var_avg_1 = dim_avg_n_Wrap(x1(:,:nlev1-1,:,:),0) ; time average + var_avg_1_intp = dim_avg_n_Wrap(x1_interp(:,:nlev1-1,:,:),0) ; time average + var_avg_2 = dim_avg_n_Wrap(x2(:,:nlev1-1,:,:),0) ; time average + var_avg_2_intp = dim_avg_n_Wrap(x2_interp(:,:nlev1-1,:,:),0) ; time average + + if (isdim(x1_interp,"levlak") )then + nlev1 = nlevlak1 + end if + if (isdim(x2_interp,"levlak") )then + nlev2 = nlevlak2 + end if + + if (nlev1 .ne. nlev2) then + print("Set 2 Failed: number of soil levels is unequal: " + nlev1 + " vs " + nlev2) + end if + + karr = (/0,4,9/) + do lev=0,2 ; assume both cases have same number of levels. + k = karr(lev) + res = True + res = set10ResMvM(res) + if (paleo .eq. 
"True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if +; open postscript file (4D) + if (isvar("wks")) then + delete(wks) + end if +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) +;; print((/"Fullname: "+wkdir + "set10_" + seasons(n)+"_"+vars(i)+"_"+k/)) + wks = gsn_open_wks(plot_type,wkdir + "set10_" + seasons(n)+"_"+vars(i)+"_"+k) + if (isvar("cmap")) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap + gsn_define_colormap(wks,cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) + res@cnFillPalette = cmap1(13:240,:) + end if + + min1 = min(var_avg_1(k,:,:)) + max1 = max(var_avg_1(k,:,:)) + + if (expFlag .eq. True) then ; set explicit contour levels (4D) + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + st = "cn_Level"+k ; get cn levels for a specific levsoi + res@cnLevels = info@$st$ + else + maxLevel = 9 + x = ndtooned(var_avg_1(k,:,:)) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin1 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax1 = x(iHigh) + + delete(x) + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + + res@pmLabelBarHeightF = 0.085 + res@pmLabelBarWidthF = 0.4 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + 
res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = -.01 + res@tiMainString = cases(0) + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + res@gsnCenterString = "(yrs " + yrs_ave1 +")" + + plot(0) = gsn_csm_contour_map(wks,var_avg_1(k,:,:),res) ; 4D case 1 plot + +; set case 2 titles (4D) +; Note: Don't delete res here - save until after obs are posted. + + delete(min1) + delete(max1) + + res@tiMainString = cases(1) + res@gsnCenterString = "(yrs " + yrs_ave2 +")" + plot(1) = gsn_csm_contour_map(wks,var_avg_2(k,:,:),res) ; 4D case 2 plot + delete(res) + + res = True + res = set10ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; diff plot (4D) + if (isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag12.rgb") ; read in colormap ( increase color resolution for difference maps) + gsn_define_colormap(wks,cmap) + delete(cmap) + end if + if (colormap.eq.1) then + res@cnFillPalette = cmap1(13:240,:) + end if + + diff = var_avg_1_intp ; trick to maintain meta data + diff(k,:,:) = var_avg_1_intp(k,:,:) - var_avg_2_intp(k,:,:) + res@tiMainString = "Case 1 - Case 2" + mindiff = min(diff(k,:,:)) + maxdiff = max(diff(k,:,:)) + + if (expFlag .eq. True) then + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + st = "cn_Level"+k+"Diff" ; get cn levels for a specific levsoi + res@cnLevels = info@$st$ + else + sd = dim_stddev(ndtooned(diff)) + + if (vars(i).eq."PREC") then + useMin1 = -2.*sd ; = mindiff + useMax1 = 2.*sd ; = maxdiff + else + useMin1 = -2.5*sd ; = mindiff + useMax1 = 2.5*sd ; = maxdiff + end if + if (useMax1 .gt. maxdiff) then + useMin1 = -sd/5. + useMax1 = sd/5. 
+ end if + maxLevel = 13 + + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + res@pmLabelBarHeightF = 0.085 + res@pmLabelBarWidthF = 0.4 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = -.01 + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + res@tiMainString = cases(0) + res@gsnCenterString = "- "+cases(1) + plot(2) = gsn_csm_contour_map(wks,diff(k,:,:),res) + delete(res) + delete(diff) + + res = True ; T-Test plot (4D) + res = set10ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + var_variance_1 = dim_variance_n_Wrap(x11_interp(:,:nlev1-1,:,:),0) + var_variance_2 = dim_variance_n_Wrap(x22_interp(:,:nlev2-1,:,:),0) + + if (any(var_variance_1 .le. 0)) then ;set variances < 0 to missing so don't divide by zero + var_variance_1 = mask(var_variance_1,(var_variance_1 .gt. 0),True) + end if + if (any(var_variance_2 .le. 0)) then + var_variance_2 = mask(var_variance_2,(var_variance_2 .gt. 
0),True) + end if + + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = sig_lvl ; user defined sig level to view + res@cnFillColors = (/"red","white"/) ; only have red squares + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.245 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = 0 + res@tiMainString = "T-Test of two Case means at each grid point" + res@gsnCenterString = " " + res@gsnRightString = " " + res@gsnLeftString = "Cells are significant at " + sig_lvl +" level" + plot(3) = gsn_csm_contour_map(wks,prob(k,:,:),res) ; will be sig test + delete(res) + pres@txString = seasons(n)+" "+vars(i)+" Level " + k+" ("+useUnits+")" ; " + gsn_panel(wks,plot,(/2,2/),pres) ; draw panel plot + end do ; level loop + end if ; 4d variables + delete([/x1,x11,x2,x1_interp,x2_interp,x11_interp,x22_interp,x22,prob,var_avg_1,var_avg_2/]) + if (isvar("var_avg_3") ) then + delete(var_avg_3) + end if + if (isvar("cmap1") ) then + delete(cmap1) + end if + delete(var_avg_1_intp) + delete(var_avg_2_intp) + delete(var_variance_1) + delete(var_variance_2) + delete(plot) + else + print("Variable " + vars(i) + " does not exist.") + end if ; plot_check loop + if (isvar("useUnits")) then + delete(useUnits) + end if + if (isvar("lon3")) then + delete(lon3) + end if + if (isvar("lat3")) then + delete(lat3) + end if + end do ; end variable loop + end do ; seasons loop +; remove error file if program completes successfully. 
+ filo = wkdir +"/set10_error_file" + system("/bin/rm -f " + filo) +end diff --git a/lnd_diag/model1-model2/set_10_seas.ncl b/lnd_diag/model1-model2/set_10_seas.ncl new file mode 100755 index 00000000..9a1dcdd0 --- /dev/null +++ b/lnd_diag/model1-model2/set_10_seas.ncl @@ -0,0 +1,1437 @@ +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$DIAG_SHARED/$VAR_MASTER" +load "$DIAG_SHARED/lnd_func.ncl" +load "$DIAG_SHARED/find_var_with_alts.ncl" +;************************************************ +; Modified by Sheri Mickelson - March 2013 +; The seasonal loop for set_2 was moved to the +; swift script to run the seasons in parallel. +;************************************************ +begin + print ("=========================================") + print ("Starting: set_2.ncl") + print ("Start Time: "+systemfunc("date") ) + print ("=========================================") + ptmpdir1 = getenv("PTMPDIR_1") + ptmpdir2 = getenv("PTMPDIR_2") + wkdir = getenv("WKDIR") + obsdata = getenv("OBS_DATA") + obs_res = getenv("OBS_RES") + paleo = getenv("paleo") + plotObs = getenv("plotObs") + useCommon1 = getenv("UseCommonName_1") + useCommon2 = getenv("UseCommonName_2") + raster = getenv("raster") + plot_type = getenv("PLOTTYPE") + land_mask = getenv("land_mask1") + user_defined = getenv("expContours") + cn = getenv("CN") + colormap = getenv("colormap") + projection=getenv("projection") + season = getenv("season") + + flandmask = stringtofloat(land_mask) + + nyrs1 = stringtointeger(getenv("clim_num_yrs_1")) + nyrs2 = stringtointeger(getenv("clim_num_yrs_2")) + +;************************************************************************** +; Cut off top and bottom percent to tighten contour intervals. 
12/06 nanr +;************************************************************************** + pCut = 5 + pLow = 0.05 + pHigh = 0.95 + +; the set lists contains two columns of information. This comes into +; NCL as one long string that we need to parse out. + tmp = stringtocharacter(asciiread(wkdir+"/master_set2.txt",-1,"string")) + nvar = dimsizes(tmp(:,0)) + scale_type = new(nvar,"string") + vars = new(nvar,"string") + c13Flag = new(nvar,"string") + dynContour = new(nvar,"string") + do i=0,nvar-1 + scale_type(i) = charactertostring(tmp(i,0:12)) + dynContour(i) = charactertostring(tmp(i,13)) + vars(i) = charactertostring(tmp(i,16:)) + c13Flag(i) = charactertostring(tmp(i,16:19)) ; derived variable prefix + end do + +;************************************************* +; get case names and create filenames to read in +;************************************************* + sig_lvl = stringtofloat(getenv("sig_lvl") ) + zoom = stringtointeger(getenv("reg_contour")); # (1 = SUB, 0 = GLOBAL) + if (zoom.eq.1)then + min_lat = stringtofloat(getenv("min_lat")) + max_lat = stringtofloat(getenv("min_lat")) + min_lon = stringtofloat(getenv("min_lon")) + max_lon = stringtofloat(getenv("min_lon")) + end if +; seasons = (/"DJF","JJA","MAM","SON","ANN"/) +;************************************************* +; common plot resources +;************************************************* + pres = True ; panel only resources + +; read in case strings + cases = new(3,string) + prefix = new(2,string) + do m = 0,1 + if (m .eq. 0) then + useFlag = useCommon1 + end if + if (m .eq. 1) then + useFlag = useCommon2 + end if + if (useFlag .eq. 
"True") then + name1 = "commonName_"+(m+1) + else + name1 = "caseid_"+(m+1) + end if + name2 = "prefix_"+(m+1) + cases(m) = getenv(name1) + prefix(m) = getenv(name2) + end do + +; loop over seasons + +; do n = 0,dimsizes(seasons)-1 + print("Processing season " + season) + in1 = addfile(ptmpdir1+"/"+prefix(0)+"_"+season+"_climo.nc","r") ; used for first 3 plots + in2 = addfile(ptmpdir2+"/"+prefix(1)+"_"+season+"_climo.nc","r") ; used for first 3 plots + in11 = addfile(ptmpdir1+"/"+prefix(0)+"_"+season+"_means.nc","r") + in22 = addfile(ptmpdir2+"/"+prefix(1)+"_"+season+"_means.nc","r") + ; get degrees of freedom for t test + if (isatt(in11,"num_yrs_averaged")) then + dof_1 = in11@num_yrs_averaged + else + dof_1 = getfilevardimsizes(in11, "time") + end if + if (isatt(in22,"num_yrs_averaged")) then + dof_2 = in22@num_yrs_averaged + else + dof_2 = getfilevardimsizes(in22, "time") + end if +; open observation files + if (obs_res .eq. "T85") then + ptr_wm = addfile(obsdata+"/T85_WILLMOTT_"+season+"_climo.nc","r") ; temp and precip + else + ptr_wm = addfile(obsdata+"/WILLMOTT_"+season+"_climo.nc","r") ; temp and precip + end if + if (obs_res .eq. 
"T85") then + ptr_grdc = addfile(obsdata+"/T85_GRDC_"+season+"_climo.nc","r") ; runoff + ptr_fd = addfile(obsdata+"/T85_FOSTERDAVY_"+season+"_climo.nc","r") ; snow depth + else + ptr_grdc = addfile(obsdata+"/GRDC_"+season+"_climo.nc","r") ; runoff + ptr_fd = addfile(obsdata+"/FOSTERDAVY_"+season+"_climo.nc","r") ; snow depth + end if + ptr_scf = addfile(obsdata+"/NOAA_AVHRR_SNOWF_"+season+"_climo.070502.nc","r") ; snow cover + ptr_swe = addfile(obsdata+"/CMC_SWE_"+season+"_climo.070502.nc","r") ; snow depth + ptr_sd = addfile(obsdata+"/CMC_SNOWD_"+season+"_climo.070503.nc","r") ; swe + ptr_alb = addfile(obsdata+"/T42_MODIS_"+season+"_climo.070523.nc","r") ; albedo + ptr_asa = addfile(obsdata+"/modisradweighted_"+season+"_071105.nc","r") ; albedo + ptr_lhf = addfile(obsdata+"/MR_LHF_0.9x1.25_"+season+"_climo.nc","r") ; latent heat flux + ptr_gpp = addfile(obsdata+"/MR_GPP_0.9x1.25_"+season+"_climo.nc","r") ; gross primary production + ptr_lai = addfile(obsdata+"/MODIS_LAI_"+season+"_climo.nc","r") ; leaf area index + +; extract years averaged for plotting + if (isatt(in1,"yrs_averaged")) then + yrs_ave1 = in1@yrs_averaged + end if + if (isatt(in2,"yrs_averaged")) then + yrs_ave2 = in2@yrs_averaged + end if + +; check to make sure levels are the same for both model cases + nlev1 = 0 + nlev2 = 0 + if (isfilevar(in1,"levsoi")) then + nlev1 = getfilevardimsizes(in1,"levsoi") + end if + if (isfilevar(in2,"levsoi")) then + nlev2 = getfilevardimsizes(in2,"levsoi") + end if + if (isfilevar(in1,"levgrnd")) then + nlev1 = getfilevardimsizes(in1,"levgrnd") + end if + if (isfilevar(in2,"levgrnd")) then + nlev2 = getfilevardimsizes(in2,"levgrnd") + end if + if (nlev1 .eq. 0 .or. nlev2 .eq. 
0) + print("FATAL ERROR Set2: nlev1 = 0") + exit + end if + + if (isfilevar(in1,"TLAKE") ) then + if (isfilevar(in1,"levlak") ) then + nlevlak1 = getfilevardimsizes(in1,"levlak") + end if + end if + if (isfilevar(in2,"TLAKE") ) then + if (isfilevar(in2,"levlak") ) then + nlevlak2 = getfilevardimsizes(in2,"levlak") + end if + end if + + lon1 = in1->lon + lon2 = in2->lon + lat1 = in1->lat + lat2 = in2->lat + nlon1 = dimsizes(lon1) + nlon2 = dimsizes(lon2) + nlat1 = dimsizes(lat1) + nlat2 = dimsizes(lat2) + time1 = in1->time + time2 = in2->time + +; extract landfrac if paleo file + if (paleo .eq. "True") then + flandmask = 0 + landfrac = in1->landfrac(:,:) + oro = new((/nlat1,nlon1/),integer) + oro = 1 + oro@_FillValue = 0 + oro = where(landfrac .gt. 0.0, 1,0) +; fname = wkdir+prefix(0) +; fname = wkdir+"basename" + fname = wkdir+getenv(name1) + paleo_outline(oro,lat1,lon1,flandmask, fname ) + res = True + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; truncate soil layers if needed + if (nlev1 .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix(0)+" nlev: " + nlev1 +"]") + nlev1 = 10 + end if + if (nlev2 .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix(1)+" nlev: " + nlev2 +"]") + nlev2 = 10 + end if + +; loop over variables + + snowFlag = 0 + do i=0,dimsizes(vars)-1 + + if (paleo .ne. "True") then + if (snowFlag .eq. 1) then ; run SNOWDP twice to see both obs datasets + i = i-1 + end if + if (vars(i) .eq. 
"SNOWDP") then + snowFlag = snowFlag + 1 + end if + end if + print("Processing variable " + vars(i)) + + plot_check_1 = True + plot_check_11 = True + plot_check_2 = True + plot_check_22 = True + + info = var_init(vars(i)) ; read in case 1 variable + filevar = find_var_with_alts(in1, vars(i), info) +; now filevar is the name of the variable in the file (either +; vars(i) or one of its alternate names); if this variable was +; not found in the file, then filevar will be "" + + plot_check = True + if (filevar .ne. "")then + if (isvar("x1")) then + delete(x1) + end if + if (isvar("x11")) then + delete(x11) + end if + if (c13Flag(i) .eq. "C13_") then + x1 = derive_del13C(vars(i),in1, scale_type(i),0,2) + x11 = derive_del13C(vars(i),in11,scale_type(i),0,2) + else + x1 = in1 ->$filevar$ + x11 = in11->$filevar$ + plot_check_1 = True + plot_check_11 = True + delete(filevar) + end if + else + if (info@derivedVariable .eq. True) then + x1 = derive_land(vars(i),in1) + x11 = derive_land(vars(i),in11) + else + print("variable "+ vars(i)+ " is not a defined variable.") + plot_check_1 = False + plot_check_11 = False + continue + end if + end if + + info = var_init(vars(i)) ; read in case 2 variable + filevar = find_var_with_alts(in2, vars(i), info) +; now filevar is the name of the variable in the file (either +; vars(i) or one of its alternate names); if this variable was +; not found in the file, then filevar will be "" + + plot_check = True + if (filevar .ne. "")then + if (isvar("x2")) then + delete(x2) + end if + if (isvar("x22")) then + delete(x22) + end if + if (c13Flag(i) .eq. "C13_") then + x2 = derive_del13C(vars(i),in2, scale_type(i),0,2) + x22 = derive_del13C(vars(i),in22,scale_type(i),0,2) + else + x2 = in2 ->$filevar$ + x22 = in22->$filevar$ + plot_check_2 = True + plot_check_22 = True + delete(filevar) + end if + else + if (info@derivedVariable .eq. 
True) then + x2 = derive_land(vars(i),in2) + x22 = derive_land(vars(i),in22) + else + print("variable "+ vars(i)+ " is not a defined variable.") + plot_check_2 = False + plot_check_22 = False + continue + end if + end if + + obsFlag = 0 ; read in observations (if present) (1 = on; 0 = off) + if (isvar("x3") ) then + delete(x3) + end if + if (paleo .ne. "True") then + if (vars(i) .eq. "TSA" ) then + x3 = ptr_wm->$vars(i)$ + x3 = x3 - 273.15 ; convert K to C + k2c = True + lon3 = ptr_wm->lon + lat3 = ptr_wm->lat + cases(2) = ptr_wm@case_id + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "PREC" ) then + x3 = ptr_wm->PREC + lon3 = ptr_wm->lon + lat3 = ptr_wm->lat + cases(2) = ptr_wm@case_id + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "TOTRUNOFF" ) then + x3 = ptr_grdc->RUNOFF + lon3 = ptr_grdc->lon + lat3 = ptr_grdc->lat + cases(2) = "GRDC" + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "SNOWDP" .and. snowFlag .eq. 1) then + x3 = ptr_fd->$vars(i)$ + lon3 = ptr_fd->lon + lat3 = ptr_fd->lat + cases(2) = "FOSTERDAVY" + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "FSNO" ) then ; fractional snow cover + x3 = ptr_scf->SCF + x3 = x3 * 0.01 ; convert from percent to 0-1 + lon3 = ptr_scf->lon + lat3 = ptr_scf->lat + cases(2) = "NOAA AVHRR (1967-2003)" + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "H2OSNO" ) then ; fractional snow cover + x3 = ptr_swe->SWE + lon3 = ptr_scf->lon + lat3 = ptr_scf->lat + cases(2) = "CMC (1980-1996)" + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "SNOWDP" .and. snowFlag .eq. 2) then + x3 = ptr_sd->SNOWD + lon3 = ptr_sd->lon + lat3 = ptr_sd->lat + cases(2) = "CMC (1980-1996)" + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "VBSA") then + x3 = ptr_alb->VBSA(0,:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_alb->lat + lon3 = ptr_alb->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. 
"NBSA") then + x3 = ptr_alb->NBSA(0,:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_alb->lat + lon3 = ptr_alb->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "VWSA") then + x3 = ptr_alb->VWSA(0,:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_alb->lat + lon3 = ptr_alb->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "NWSA") then + x3 = ptr_alb->NWSA(0,:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_alb->lat + lon3 = ptr_alb->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "ASA") then + x3 = ptr_asa->BRDALB(0,:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_asa->lat + lon3 = ptr_asa->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "LHEAT") then + x3 = ptr_lhf->LHF(:,:) + cases(2) = "FLUXNET (1982-2008)" + lat3 = ptr_lhf->lat + lon3 = ptr_lhf->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if ((vars(i) .eq. "FPSN" .and. cn .eq. 0) .or. vars(i) .eq. "GPP") then + x3 = ptr_gpp->GPP(:,:) + cases(2) = "FLUXNET (1982-2008)" + lat3 = ptr_gpp->lat + lon3 = ptr_gpp->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "TLAI") then + x3 = ptr_lai->TLAI(:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_lai->lat + lon3 = ptr_lai->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + x3 = 0.0 + obsFlag = 0; + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if ; paleo + +; if (paleo .eq. "True") then +; obsFlag = 0 +; end if + + if (obsFlag .eq. 1) then + nlon3 = dimsizes(lon3) + nlat3 = dimsizes(lat3) + end if + + if (x1@units .eq. 
"K") then + x1 = x1 - 273.15 ; convert K to C + x2 = x2 - 273.15 ; convert K to C + end if + + ; catch NaNs + if (isvar("x1") ) then + x1 = catchNaN(x1) + end if + if (isvar("x11") ) then + x11 = catchNaN(x11) + end if + if (isvar("x2") ) then + x2 = catchNaN(x2) + end if + if (isvar("x22") ) then + x22 = catchNaN(x22) + end if + + if (all(ismissing(x1)))then + print("variable "+ vars(i)+ " contains all missing values.") + plot_check_1 = False + delete(x1) + end if + if (all(ismissing(x11)))then + print("variable "+ vars(i)+ " not found") + plot_check_11 = False + delete(x11) + end if + if (all(ismissing(x2)))then + print("variable "+ vars(i)+ " not found") + plot_check_2 = False + delete(x2) + end if + if (all(ismissing(x22)))then + print("variable "+ vars(i)+ " not found") + plot_check_22 = False + delete(x22) + end if + +; ---------------------- +; start plotting process if variables exist +; ---------------------- + if (plot_check_1 .eq. True .and. plot_check_11 .eq. True .and. plot_check_2 .eq. True .and. plot_check_22 .eq. True) then + x1 = scale_var(x1, scale_type(i),info) + x11 = scale_var(x11,scale_type(i),info) + x2 = scale_var(x2, scale_type(i),info) + x22 = scale_var(x22,scale_type(i),info) + +; flip longitudes to -180 to 180 if needed + if (min(x2&lon) .ge. 0 .and. min(x1&lon) .lt. 0) then + x2 = lonFlip(x2) + x22 = lonFlip(x22) + end if + if (min(x1&lon) .ge. 0 .and. min(x2&lon) .lt. 0) then + x1 = lonFlip(x1) + x11 = lonFlip(x11) + end if + +; process observations if present. Case 1 as root. +; flip x3 longitudes to -180 to 180 if needed + if (obsFlag .eq. 1) ; (1 = on; 0 = off) + if (min(x1&lon) .lt. 0 .and. min(x3&lon) .ge. 0) then + if (lon3(0) .ge. 0.) then + x3 = lonFlip(x3) + end if + end if + end if + + rank1 = dimsizes(dimsizes(x1)) ; calculate size + rank2 = dimsizes(dimsizes(x2)) + + useCase1 = True + useCase2 = False + if (nlat1 .ne. nlat2) then + if (nlat1 .gt. 
nlat2) then ; interpolate x1 to x2 grid + x1_interp = linint2_Wrap(lon1,lat1,x1,True,lon2,lat2,0) + x2_interp = x2 + x11_interp = linint2_Wrap(lon1,lat1,x11,True,lon2,lat2,0) + x22_interp = x22 + useCase1 = False + useCase2 = True + use_nlat = nlat2 + use_nlon = nlon2 + else + x2_interp = linint2_Wrap(lon2,lat2,x2,True,lon1,lat1,0) + x1_interp = x1 + + x22_interp = linint2_Wrap(lon2,lat2,x22,True,lon1,lat1,0) + x11_interp = x11 + use_nlat = nlat1 + use_nlon = nlon1 + end if + else + use_nlon = nlon1 + use_nlat = nlat1 + x1_interp = x1 + x2_interp = x2 + x11_interp = x11 + x22_interp = x22 + end if + +; interpolate obs to new grid if necessary + if (obsFlag .eq. 1) ; (1 = on; 0 = off) + if (nlat1 .ne. nlat3 .or. nlat2 .ne. nlat3) then + if (useCase1 .eq. True) then + x3_interp = linint2_Wrap(lon3,lat3,x3,True,lon1,lat1,0) ; interpolate x3 to x1 grid + end if + if (useCase2 .eq. True) then + x3_interp = linint2_Wrap(lon3,lat3,x3,True,lon2,lat2,0) ; interpolate x3 to x2 grid + end if + else + x3_interp = x3 + end if +; different plots for observations +; plot = new(9,graphic) + else +; x1_interp = x1 ; fix, per Keith Oleson +; plot = new(4,graphic) + end if ; end observations + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then + plot = new(9,graphic) + else + plot = new(4,graphic) + end if + + if (x1@units .eq. "K") then + useUnits = "C" ; assume same units for x1 and x2 + else + useUnits = x1@units ; assume same units for x1 and x2 + end if +;******************************************************************* +; 3D VARIABLES +;******************************************************************* + if (rank1.eq.3)then + +; calculate time average + var_avg_1 = dim_avg_n_Wrap(x1,0) ; time average + var_avg_2 = dim_avg_n_Wrap(x2,0) ; time average + var_avg_1_intp = dim_avg_n_Wrap(x1_interp,0) ; time average + var_avg_2_intp = dim_avg_n_Wrap(x2_interp,0) ; time average + + if (obsFlag .eq. 1) then ; (1 = obs; 0 = no obs) + x3_interp = mask(x3_interp, (x1_interp(0,:,:) .eq. 
x1_interp@_FillValue), False) ; remove non-land points + var_avg_3 = x3_interp + var_avg_3_intp = x3_interp + min3 = min(var_avg_3) + max3 = max(var_avg_3) + delete(x3_interp) + end if + + min1 = min(var_avg_1) + min2 = min(var_avg_2) + max1 = max(var_avg_1) + max2 = max(var_avg_2) + + if (min1 .le. min2) then + if (isvar("min12")) then + delete(min12) + end if + min12 = min1 + else + if (isvar("min12")) then + delete(min12) + end if + min12 = min2 + end if + + if (max1 .ge. max2) then + if (isvar("max12")) then + delete(max12) + end if + max12 = max1 + else + if(isvar("max12")) then + delete(max12) + end if + max12 = max2 + end if + +;************************************************************************** +; Set contour Levels: Dynamic vs pre-defined contours +;************************************************************************** + +; dynContour: 0=dynamic;1=explicit + if (info@cn_Explicit .eq. True .and. dynContour(i) .eq. 1 .and. user_defined .eq. 1) then + expFlag = True + else + expFlag = False + end if + + res = True + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + if (expFlag .eq. True) then ; explicitly defined contours. + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_Levels ; contour levels + else + if (c13Flag(i) .eq. "C13_") then + useMin12 = -40 ; hardcode boundaries to expectations + useMax12 = 0 ; hardcode boundaries to expectations + else + x = ndtooned(var_avg_1) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin12 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax12 = x(iHigh) + delete(x) + end if + maxLevel = 9 + + if (vars(i) .eq. "SNOWDP") then + useMin12 = 0. + useMax12 = 1. + end if + if (vars(i) .eq. "H2OSNO") then + useMin12 = 0. + useMax12 = 500. + end if + if (vars(i) .eq. "SNOWICE") then + useMin12 = 0. + useMax12 = 500. 
+ end if + if (vars(i) .eq. "TWS") then + useMin12 = 4000. + useMax12 = 10000. + end if + + mnmxint = nice_mnmxintvl( useMin12, useMax12, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; explicit contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + + delete(useMin12) + delete(useMax12) + end if + +; open postscript file and choose colormap + if (isvar("wks")) then + delete(wks) + end if + if (vars(i) .eq. "SNOWDP") then + if (snowFlag .eq. 1) then + wks = gsn_open_wks(plot_type,wkdir + "set2_" + season+"_"+vars(i)+"_FOSTERDAVY") + else + wks = gsn_open_wks(plot_type,wkdir + "set2_" + season+"_"+vars(i)+"_CMC") + end if + else + wks = gsn_open_wks(plot_type,wkdir + "set2_" + season+"_"+vars(i)) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap + gsn_define_colormap(wks,cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) + res@cnFillPalette = cmap1(13:240,:) + end if + + res@tiMainString = cases(0) ; set case 1 titles + res@gsnCenterString = "(yrs " + yrs_ave1 + ")" + min1 = min(var_avg_1) + max1 = max(var_avg_1) + + res@gsnLeftString = "" ; vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + delete(min1) + delete(max1) + + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 
1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + + ctr = 0 + plot(ctr) = gsn_csm_contour_map(wks,var_avg_1,res) ; case 1 plot + ctr = ctr+1 + +; Note: Don't delete res here - save until after obs are posted. + res@tiMainString = cases(1) ; set case 2 titles + res@gsnCenterString = " (yrs " + yrs_ave2 + ")" + if (vars(i) .eq. "CH4PROD" .or. vars(i) .eq. "CH4_SURF_EBUL_SAT" .or. vars(i) .eq. "CH4_SURF_EBUL_UNSAT") then + if (isatt(res,"cnLabelBarEndStyle")) then + if (res@cnLabelBarEndStyle.eq."IncludeMinMaxLabels") then + res@cnLabelBarEndStyle = "IncludeOuterBoxes" ; temporarily turn off minmax labels. + end if + end if + else + if (isatt(res,"cnLabelBarEndStyle")) then + res@cnLabelBarEndStyle = "IncludeMinMaxLabels" + end if + end if + plot(ctr) = gsn_csm_contour_map(wks,var_avg_2,res) ; case 2 plot + ctr = ctr+1 + +; Note: Don't delete res here - save until after obs are posted. + if (obsFlag .eq. 1 .and. plotObs .eq. 
1) then ; OBS plot (if present) + res = True + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + res@tiMainString = cases(2) + res@gsnCenterString = " " + delete(min3) + delete(max3) + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + plot(ctr) = gsn_csm_contour_map(wks,var_avg_3,res) + ctr = ctr+1 + end if + delete(res) ; delete res in time for difference plots. + +; difference plots + if (isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag12.rgb") ; read in colormap (increase color resolution for difference maps) + gsn_define_colormap(wks,cmap) + delete(cmap) + end if + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then ; OBS plot (if present) + res = True + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + res@cnFillPalette = cmap1(13:240,:) + + +; Case 1 - Obs + diff = var_avg_1_intp ; trick to maintain meta data + check = isdouble(var_avg_3_intp) + if (check .eq. 
True) then + diff = var_avg_1_intp - doubletofloat(var_avg_3_intp) + else + diff = var_avg_1_intp - var_avg_3_intp + end if + delete(check) + mindiff = min(diff) + maxdiff = max(diff) + res@tiMainString = cases(0) + res@gsnCenterString = "- Observations" +; set contour levels ( use larger contour intervals for Model vs Obs ) + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiffObs ; contour levels + + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(diff) + delete(mindiff) + delete(maxdiff) + ctr = ctr+1 + +; Case 2 - Obs + diff = var_avg_1_intp ; trick to maintain meta data + diff = var_avg_2_intp - doubletofloat(var_avg_3_intp) + res@tiMainString = cases(1) + res@gsnCenterString = "- Observations" + mindiff = min(diff) + maxdiff = max(diff) + + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiffObs ; contour levels + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(res) + delete(diff) + delete(mindiff) + delete(maxdiff) + ctr = ctr+2 + end if + +; Case 1 - Case 2 + diff = var_avg_1_intp ; trick to maintain meta data + diff = var_avg_1_intp - var_avg_2_intp + mindiff = min(diff) + maxdiff = max(diff) + +; set contour levels + res = True + res = set2ResMvM(res) + res@cnFillPalette = cmap1(13:240,:) + + if (paleo .eq. 
"True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + if (expFlag .eq. False) then + sd = dim_stddev(ndtooned(diff)) + + if (vars(i).eq."PREC") then + useMin1 = -2.*sd ; = mindiff + useMax1 = 2.*sd ; = maxdiff + else + useMin1 = -2.5*sd ; = mindiff + useMax1 = 2.5*sd ; = maxdiff + end if + + if (c13Flag(i) .eq. "C13_") then + useMin1 = -40 ; hardcode boundaries to expectations + useMax1 = 0 ; hardcode boundaries to expectations + end if + if (mindiff.eq.maxdiff) then ; two cases are identical + useMin1 = -6 + useMax1 = 6 + diff = 0. + if (isatt(res,"cnLabelBarEndStyle")) then + if (res@cnLabelBarEndStyle.eq."IncludeMinMaxLabels") then + res@cnLabelBarEndStyle = "IncludeOuterBoxes" ; temporarily turn off minmax labels. + end if + end if + end if + maxLevel = 13 + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete([/useMin1,useMax1/]) + else + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiff ; contour levels + end if + + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 
1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + res@tiMainString = cases(0) + res@gsnCenterString= "- "+cases(1) + + res@gsnLeftString = "" ; vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ; useUnits ; assume same units for x1 and x2 + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(res) + ctr = ctr+1 + delete([/diff,mindiff,maxdiff/]) + + res = True ; T-Test plots + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + var_variance_1 = dim_variance_n_Wrap(x11_interp,0) ; time average + var_variance_2 = dim_variance_n_Wrap(x22_interp,0) ; time average + +; set variances < 0 to missing so don't divide by zero + if (any(var_variance_1 .le. 0)) then + var_variance_1 = mask(var_variance_1,(var_variance_1 .gt. 0),True) + end if + if (any(var_variance_2 .le. 0)) then + var_variance_2 = mask(var_variance_2,(var_variance_2 .gt. 0),True) + end if +; calculate t-test + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then + + pfinal = new((/use_nlat,use_nlon/),"float") + + diff13 = abs(var_avg_1_intp - doubletofloat(var_avg_3_intp)) + diff23 = abs(var_avg_2_intp - doubletofloat(var_avg_3_intp)) + delete(var_avg_3_intp) + copy_VarCoords(prob,diff13) + copy_VarCoords(prob,diff23) + + p1d = ndtooned(prob) + d131d = ndtooned(diff13) + d231d = ndtooned(diff23) + + do c=0,dimsizes(p1d)-1 + if (.not. 
ismissing(d131d(c)) .and. .not. ismissing(d231d(c) )) then + if (.not. ismissing(p1d(c))) then + if (d131d(c) .gt. d231d(c) ) then ; case 2 is closer to obs + if (p1d(c) .lt. sig_lvl ) then + p1d(c) = 10 ; blue + end if + else ; case 1 is closer to obs + if (p1d(c) .lt. sig_lvl ) then + p1d(c) = 5 ; red + end if + end if + end if + end if + end do + + pfinal = onedtond(p1d,dimsizes(prob)) + copy_VarCoords(prob,pfinal) + + res@gsnCenterString = "" + res@gsnRightString = "" + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.3 + res@tiMainString = cases(0)+" (green)" + res@gsnCenterString = cases(1)+" (red)" + ; res@tiMainString = "Case1+ (green) and Case2+ (red) relative to obs" + res@gsnLeftString = "Model relative to Obs" +; res@gsnSpreadColors = False + res@cnFillMode = "RasterFill" + res@cnLevelSelectionMode = "ExplicitLevels" + ; nanr 12/01/07 + if (isvar("cnLevels")) then + delete(res@cnLevels) + end if + res@cnLevels = (/2,7/) ; user defined sig level to view + res@cnFillColors = (/"white","green","red"/) ; white=NoChange; green=Case1 better; red=Case2 better +; res@cnFillColors = (/0,11,11/) ; only have red squares + + + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + + plot(ctr) = gsn_csm_contour_map(wks, pfinal,res) ; will be sig test + delete([/res,pfinal,p1d,d131d,d231d,diff13,diff23/]) + ctr = ctr+1 + end if + + res = True ; t-test plot (3D vars) + res = set2ResMvM(res) + if (paleo .eq. 
"True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + +; res@gsnSpreadColors = False + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = sig_lvl ; user defined sig level to view + res@cnFillColors = (/"red","white"/) ; only have red squares + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.245 + res@tiMainString = "T-Test of two Case means at each grid point" + res@gsnLeftString = "Cells are significant at " + sig_lvl +" level" + res@gsnRightString = "" + res@gsnCenterString = "" + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + res@tiMainOffsetYF = 0 + + plot(ctr) = gsn_csm_contour_map(wks,prob,res) ; will be sig test + delete(res) + + if (obsFlag .eq. 
1 .and. plotObs .eq. 1) then ; panel plot + nrows = 3 + ncols = 3 + else + nrows = 2 + ncols = 2 + end if + pres@txString = season+" "+vars(i)+" ("+useUnits+")" ; " + gsn_panel(wks,plot,(/nrows,ncols/),pres) ; draw panel plot + else +;************************************************************************* +; 4D VARIABLES +;************************************************************************* +; calculate 4D average + var_avg_1 = dim_avg_n_Wrap(x1(:,:nlev1-1,:,:),0) ; time average + var_avg_1_intp = dim_avg_n_Wrap(x1_interp(:,:nlev1-1,:,:),0) ; time average + var_avg_2 = dim_avg_n_Wrap(x2(:,:nlev1-1,:,:),0) ; time average + var_avg_2_intp = dim_avg_n_Wrap(x2_interp(:,:nlev1-1,:,:),0) ; time average + + if (isdim(x1_interp,"levlak") )then + nlev1 = nlevlak1 + end if + if (isdim(x2_interp,"levlak") )then + nlev2 = nlevlak2 + end if + + if (nlev1 .ne. nlev2) then + print("Set 2 Failed: number of soil levels is unequal: " + nlev1 + " vs " + nlev2) + end if + + karr = (/0,4,9/) + do lev=0,2 ; assume both cases have same number of levels. + k = karr(lev) + res = True + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if +; open postscript file (4D) + if (isvar("wks")) then + delete(wks) + end if + wks = gsn_open_wks(plot_type,wkdir + "set2_" + season+"_"+vars(i)+"_"+k) + if (isvar("cmap")) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap + gsn_define_colormap(wks,cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) + res@cnFillPalette = cmap1(13:240,:) + end if + + min1 = min(var_avg_1(k,:,:)) + max1 = max(var_avg_1(k,:,:)) + + if (expFlag .eq. 
True) then ; set explicit contour levels (4D) + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + st = "cn_Level"+k ; get cn levels for a specific levsoi + res@cnLevels = info@$st$ + else + maxLevel = 9 + x = ndtooned(var_avg_1(k,:,:)) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin1 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax1 = x(iHigh) + + delete(x) + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + + res@pmLabelBarHeightF = 0.085 + res@pmLabelBarWidthF = 0.4 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = -.01 + res@tiMainString = cases(0) + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + res@gsnCenterString = "(yrs " + yrs_ave1 +")" + + plot(0) = gsn_csm_contour_map(wks,var_avg_1(k,:,:),res) ; 4D case 1 plot + +; set case 2 titles (4D) +; Note: Don't delete res here - save until after obs are posted. + + delete(min1) + delete(max1) + + res@tiMainString = cases(1) + res@gsnCenterString = "(yrs " + yrs_ave2 +")" + plot(1) = gsn_csm_contour_map(wks,var_avg_2(k,:,:),res) ; 4D case 2 plot + delete(res) + + res = True + res = set2ResMvM(res) + if (paleo .eq. 
"True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; diff plot (4D) + if (isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag12.rgb") ; read in colormap ( increase color resolution for difference maps) + gsn_define_colormap(wks,cmap) + delete(cmap) + end if + if (colormap.eq.1) then + res@cnFillPalette = cmap1(13:240,:) + end if + + diff = var_avg_1_intp ; trick to maintain meta data + diff(k,:,:) = var_avg_1_intp(k,:,:) - var_avg_2_intp(k,:,:) + res@tiMainString = "Case 1 - Case 2" + mindiff = min(diff(k,:,:)) + maxdiff = max(diff(k,:,:)) + + if (expFlag .eq. True) then + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + st = "cn_Level"+k+"Diff" ; get cn levels for a specific levsoi + res@cnLevels = info@$st$ + else + sd = dim_stddev(ndtooned(diff)) + + if (vars(i).eq."PREC") then + useMin1 = -2.*sd ; = mindiff + useMax1 = 2.*sd ; = maxdiff + else + useMin1 = -2.5*sd ; = mindiff + useMax1 = 2.5*sd ; = maxdiff + end if + if (useMax1 .gt. maxdiff) then + useMin1 = -sd/5. + useMax1 = sd/5. 
+ end if + maxLevel = 13 + + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + res@pmLabelBarHeightF = 0.085 + res@pmLabelBarWidthF = 0.4 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = -.01 + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + res@tiMainString = cases(0) + res@gsnCenterString = "- "+cases(1) + plot(2) = gsn_csm_contour_map(wks,diff(k,:,:),res) + delete(res) + delete(diff) + + res = True ; T-Test plot (4D) + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + var_variance_1 = dim_variance_n_Wrap(x11_interp(:,:nlev1-1,:,:),0) + var_variance_2 = dim_variance_n_Wrap(x22_interp(:,:nlev2-1,:,:),0) + + if (any(var_variance_1 .le. 0)) then ;set variances < 0 to missing so don't divide by zero + var_variance_1 = mask(var_variance_1,(var_variance_1 .gt. 0),True) + end if + if (any(var_variance_2 .le. 0)) then + var_variance_2 = mask(var_variance_2,(var_variance_2 .gt. 
0),True) + end if + + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = sig_lvl ; user defined sig level to view + res@cnFillColors = (/"red","white"/) ; only have red squares + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.245 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = 0 + res@tiMainString = "T-Test of two Case means at each grid point" + res@gsnCenterString = " " + res@gsnRightString = " " + res@gsnLeftString = "Cells are significant at " + sig_lvl +" level" + plot(3) = gsn_csm_contour_map(wks,prob(k,:,:),res) ; will be sig test + delete(res) + pres@txString = season+" "+vars(i)+" Level " + k+" ("+useUnits+")" ; " + gsn_panel(wks,plot,(/2,2/),pres) ; draw panel plot + end do ; level loop + end if ; 4d variables + delete([/x1,x11,x2,x1_interp,x2_interp,x11_interp,x22_interp,x22,prob,var_avg_1,var_avg_2/]) + if (isvar("var_avg_3") ) then + delete(var_avg_3) + end if + if (isvar("cmap1") ) then + delete(cmap1) + end if + delete(var_avg_1_intp) + delete(var_avg_2_intp) + delete(var_variance_1) + delete(var_variance_2) + delete(plot) + else + print("Variable " + vars(i) + " does not exist.") + end if ; plot_check loop + if (isvar("useUnits")) then + delete(useUnits) + end if + if (isvar("lon3")) then + delete(lon3) + end if + if (isvar("lat3")) then + delete(lat3) + end if + end do ; end variable loop +; end do ; seasons loop +; remove error file if program completes successfully. 
+ filo = wkdir +"/set2_error_file" + system("/bin/rm -f " + filo) +end diff --git a/lnd_diag/model1-model2/set_11.ncl b/lnd_diag/model1-model2/set_11.ncl new file mode 100755 index 00000000..e67f55e6 --- /dev/null +++ b/lnd_diag/model1-model2/set_11.ncl @@ -0,0 +1,1463 @@ +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$DIAG_SHARED/$VAR_MASTER" +load "$DIAG_SHARED/lnd_func.ncl" +load "$DIAG_SHARED/find_var_with_alts.ncl" +;************************************************ +begin + print ("=========================================") + print ("Starting: set_11.ncl") + print ("Start Time: "+systemfunc("date") ) + print ("=========================================") + ptmpdir1 = getenv("PTMPDIR_1") + ptmpdir2 = getenv("PTMPDIR_2") + wkdir = getenv("WKDIR") + obsdata = getenv("OBS_DATA") + obs_res = getenv("OBS_RES") + paleo = getenv("paleo") + plotObs = getenv("plotObs") + useCommon1 = getenv("UseCommonName_1") + useCommon2 = getenv("UseCommonName_2") + raster = getenv("raster") + plot_type = getenv("PLOTTYPE") + land_mask = getenv("land_mask1") + user_defined = getenv("expContours") + cn = getenv("CN") + colormap = getenv("colormap") + projection=getenv("projection") + +;; print((/"PTMPDIR_1: "+ptmpdir1/)) +;; print((/"PTMPDIR_2: "+ptmpdir2/)) +;; print((/"wkdir: "+wkdir/)) + + flandmask = stringtofloat(land_mask) + + nyrs1 = stringtointeger(getenv("clim_num_yrs_1")) + nyrs2 = stringtointeger(getenv("clim_num_yrs_2")) + +;************************************************************************** +; Cut off top and bottom percent to tighten contour intervals. 12/06 nanr +;************************************************************************** + pCut = 5 + pLow = 0.05 + pHigh = 0.95 + +; the set lists contains two columns of information. This comes into +; NCL as one long string that we need to parse out. 
+ tmp = stringtocharacter(asciiread(wkdir+"/master_set11.txt",-1,"string")) + nvar = dimsizes(tmp(:,0)) + scale_type = new(nvar,"string") + vars = new(nvar,"string") + c13Flag = new(nvar,"string") + dynContour = new(nvar,"string") + do i=0,nvar-1 + scale_type(i) = charactertostring(tmp(i,0:12)) + dynContour(i) = charactertostring(tmp(i,13)) + vars(i) = charactertostring(tmp(i,16:)) + c13Flag(i) = charactertostring(tmp(i,16:19)) ; derived variable prefix + end do + +;************************************************* +; get case names and create filenames to read in +;************************************************* + sig_lvl = stringtofloat(getenv("sig_lvl") ) + zoom = stringtointeger(getenv("reg_contour")); # (1 = SUB, 0 = GLOBAL) + if (zoom.eq.1)then + min_lat = stringtofloat(getenv("min_lat")) + max_lat = stringtofloat(getenv("min_lat")) + min_lon = stringtofloat(getenv("min_lon")) + max_lon = stringtofloat(getenv("min_lon")) + end if + seasons = (/"DJF","JJA","MAM","SON","ANN"/) +;************************************************* +; common plot resources +;************************************************* + pres = True ; panel only resources + +; read in case strings + cases = new(3,string) + prefix = new(2,string) + do m = 0,1 + if (m .eq. 0) then + useFlag = useCommon1 + end if + if (m .eq. 1) then + useFlag = useCommon2 + end if + if (useFlag .eq. 
"True") then + name1 = "commonName_"+(m+1) + else + name1 = "caseid_"+(m+1) + end if + name2 = "prefix_"+(m+1) + cases(m) = getenv(name1) + prefix(m) = getenv(name2) + end do + +; loop over seasons + + do n = 0,dimsizes(seasons)-1 + print("Processing season " + seasons(n)) + in1 = addfile(ptmpdir1+"/"+prefix(0)+"_"+seasons(n)+"_climo.nc","r") ; used for first 3 plots + in2 = addfile(ptmpdir2+"/"+prefix(1)+"_"+seasons(n)+"_climo.nc","r") ; used for first 3 plots + in11 = addfile(ptmpdir1+"/"+prefix(0)+"_"+seasons(n)+"_means.nc","r") + in22 = addfile(ptmpdir2+"/"+prefix(1)+"_"+seasons(n)+"_means.nc","r") + ; get degrees of freedom for t test + if (isatt(in11,"num_yrs_averaged")) then + dof_1 = in11@num_yrs_averaged + else + dof_1 = getfilevardimsizes(in11, "time") + end if + if (isatt(in22,"num_yrs_averaged")) then + dof_2 = in22@num_yrs_averaged + else + dof_2 = getfilevardimsizes(in22, "time") + end if +; open observation files + + ptr_racmo = addfile(obsdata+"/RACMO2_ANT_"+seasons(n)+"_climo.nc","r") ; RACMO2.4 + +; extract years averaged for plotting + if (isatt(in1,"yrs_averaged")) then + yrs_ave1 = in1@yrs_averaged + end if + if (isatt(in2,"yrs_averaged")) then + yrs_ave2 = in2@yrs_averaged + end if + +; check to make sure levels are the same for both model cases + nlev1 = 0 + nlev2 = 0 + if (isfilevar(in1,"levsoi")) then + nlev1 = getfilevardimsizes(in1,"levsoi") + end if + if (isfilevar(in2,"levsoi")) then + nlev2 = getfilevardimsizes(in2,"levsoi") + end if + if (isfilevar(in1,"levgrnd")) then + nlev1 = getfilevardimsizes(in1,"levgrnd") + end if + if (isfilevar(in2,"levgrnd")) then + nlev2 = getfilevardimsizes(in2,"levgrnd") + end if + if (nlev1 .eq. 0 .or. nlev2 .eq. 
0) + print("FATAL ERROR Set2: nlev1 = 0") + exit + end if + + if (isfilevar(in1,"TLAKE") ) then + if (isfilevar(in1,"levlak") ) then + nlevlak1 = getfilevardimsizes(in1,"levlak") + end if + end if + if (isfilevar(in2,"TLAKE") ) then + if (isfilevar(in2,"levlak") ) then + nlevlak2 = getfilevardimsizes(in2,"levlak") + end if + end if + + lon1 = in1->lon + lon2 = in2->lon + lat1 = in1->lat + lat2 = in2->lat + nlon1 = dimsizes(lon1) + nlon2 = dimsizes(lon2) + nlat1 = dimsizes(lat1) + nlat2 = dimsizes(lat2) + time1 = in1->time + time2 = in2->time + +; extract landfrac if paleo file + if (paleo .eq. "True") then + flandmask = 0 + landfrac = in1->landfrac(:,:) + oro = new((/nlat1,nlon1/),integer) + oro = 1 + oro@_FillValue = 0 + oro = where(landfrac .gt. 0.0, 1,0) +; fname = wkdir+prefix(0) +; fname = wkdir+"basename" + fname = wkdir+getenv(name1) + paleo_outline(oro,lat1,lon1,flandmask, fname ) + res = True + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; truncate soil layers if needed + if (nlev1 .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix(0)+" nlev: " + nlev1 +"]") + nlev1 = 10 + end if + if (nlev2 .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix(1)+" nlev: " + nlev2 +"]") + nlev2 = 10 + end if + +; loop over variables + + snowFlag = 0 + do i=0,dimsizes(vars)-1 + + if (paleo .ne. "True") then + if (snowFlag .eq. 1) then ; run SNOWDP twice to see both obs datasets + i = i-1 + end if + if (vars(i) .eq. 
"SNOWDP") then + snowFlag = snowFlag + 1 + end if + end if + print("Processing variable " + vars(i)) + + plot_check_1 = True + plot_check_11 = True + plot_check_2 = True + plot_check_22 = True + + info = var_init(vars(i)) ; read in case 1 variable + filevar = find_var_with_alts(in1, vars(i), info) +; now filevar is the name of the variable in the file (either +; vars(i) or one of its alternate names); if this variable was +; not found in the file, then filevar will be "" + + plot_check = True + if (filevar .ne. "")then + if (isvar("x1")) then + delete(x1) + end if + if (isvar("x11")) then + delete(x11) + end if + if (c13Flag(i) .eq. "C13_") then + x1 = derive_del13C(vars(i),in1, scale_type(i),0,2) + x11 = derive_del13C(vars(i),in11,scale_type(i),0,2) + else + x1 = in1 ->$filevar$ + x11 = in11->$filevar$ + plot_check_1 = True + plot_check_11 = True + delete(filevar) + end if + else + if (info@derivedVariable .eq. True) then + x1 = derive_land(vars(i),in1) + x11 = derive_land(vars(i),in11) + else + print("variable "+ vars(i)+ " is not a defined variable.") + plot_check_1 = False + plot_check_11 = False + continue + end if + end if + + info = var_init(vars(i)) ; read in case 2 variable + filevar = find_var_with_alts(in2, vars(i), info) +; now filevar is the name of the variable in the file (either +; vars(i) or one of its alternate names); if this variable was +; not found in the file, then filevar will be "" + + plot_check = True + if (filevar .ne. "")then + if (isvar("x2")) then + delete(x2) + end if + if (isvar("x22")) then + delete(x22) + end if + if (c13Flag(i) .eq. "C13_") then + x2 = derive_del13C(vars(i),in2, scale_type(i),0,2) + x22 = derive_del13C(vars(i),in22,scale_type(i),0,2) + else + x2 = in2 ->$filevar$ + x22 = in22->$filevar$ + plot_check_2 = True + plot_check_22 = True + delete(filevar) + end if + else + if (info@derivedVariable .eq. 
True) then + x2 = derive_land(vars(i),in2) + x22 = derive_land(vars(i),in22) + else + print("variable "+ vars(i)+ " is not a defined variable.") + plot_check_2 = False + plot_check_22 = False + continue + end if + end if + + obsFlag = 0 ; read in observations (if present) (1 = on; 0 = off) + if (isvar("x3") ) then + delete(x3) + end if + if (paleo .ne. "True") then + if (vars(i) .eq. "ASA" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3*100. ; fraction to % + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "QSOIL" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "U10" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FGR" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FLDS" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FIRE" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3 * -1. ; defined negative in RACMO2.4 + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "RAIN" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "PBOT" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "Q2M" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. 
"QICE_FRZ" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "RH2M" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3 * 100. ; fraction to % + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "QICE_MELT" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FSH" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3 * -1. ; defined neg. in CESM + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "QICE" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "SNOW" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "QSNOMELT" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FSDS" ) then + x3 = ptr_racmo->$vars(i)$ + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "FSR" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3 * -1. ; defined as negative in RACMO2.4 + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + else + if (vars(i) .eq. "TSA" ) then + x3 = ptr_racmo->$vars(i)$ + x3 = x3 - 273.16 ; K to C + lon3 = ptr_racmo->lon + lat3 = ptr_racmo->lat + cases(2) = "RACMO2.4" + obsFlag = 1; + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + + end if ; paleo + + if (paleo .eq. "True") then + obsFlag = 0 + end if + + if (obsFlag .eq. 
1) then + nlon3 = dimsizes(lon3) + nlat3 = dimsizes(lat3) + end if + + if (x1@units .eq. "K") then + x1 = x1 - 273.15 ; convert K to C + x2 = x2 - 273.15 ; convert K to C + end if + + ; catch NaNs + if (isvar("x1") ) then + x1 = catchNaN(x1) + end if + if (isvar("x11") ) then + x11 = catchNaN(x11) + end if + if (isvar("x2") ) then + x2 = catchNaN(x2) + end if + if (isvar("x22") ) then + x22 = catchNaN(x22) + end if + + if (all(ismissing(x1)))then + print("variable "+ vars(i)+ " contains all missing values.") + plot_check_1 = False + delete(x1) + end if + if (all(ismissing(x11)))then + print("variable "+ vars(i)+ " not found") + plot_check_11 = False + delete(x11) + end if + if (all(ismissing(x2)))then + print("variable "+ vars(i)+ " not found") + plot_check_2 = False + delete(x2) + end if + if (all(ismissing(x22)))then + print("variable "+ vars(i)+ " not found") + plot_check_22 = False + delete(x22) + end if + +; ---------------------- +; start plotting process if variables exist +; ---------------------- + if (plot_check_1 .eq. True .and. plot_check_11 .eq. True .and. plot_check_2 .eq. True .and. plot_check_22 .eq. True) then + x1 = scale_var(x1, scale_type(i),info) + x11 = scale_var(x11,scale_type(i),info) + x2 = scale_var(x2, scale_type(i),info) + x22 = scale_var(x22,scale_type(i),info) + +; flip longitudes to -180 to 180 if needed + if (min(x2&lon) .ge. 0 .and. min(x1&lon) .lt. 0) then + x2 = lonFlip(x2) + x22 = lonFlip(x22) + end if + if (min(x1&lon) .ge. 0 .and. min(x2&lon) .lt. 0) then + x1 = lonFlip(x1) + x11 = lonFlip(x11) + end if + +; process observations if present. Case 1 as root. +; flip x3 longitudes to -180 to 180 if needed + if (obsFlag .eq. 1) ; (1 = on; 0 = off) + if (min(x1&lon) .lt. 0 .and. min(x3&lon) .ge. 0) then + if (lon3(0) .ge. 0.) then + x3 = lonFlip(x3) + end if + end if + end if + + rank1 = dimsizes(dimsizes(x1)) ; calculate size + rank2 = dimsizes(dimsizes(x2)) + + useCase1 = True + useCase2 = False + if (nlat1 .ne. 
nlat2) then + if (nlat1 .gt. nlat2) then ; interpolate x1 to x2 grid + x1_interp = linint2_Wrap(lon1,lat1,x1,True,lon2,lat2,0) + x2_interp = x2 + x11_interp = linint2_Wrap(lon1,lat1,x11,True,lon2,lat2,0) + x22_interp = x22 + useCase1 = False + useCase2 = True + use_nlat = nlat2 + use_nlon = nlon2 + else + x2_interp = linint2_Wrap(lon2,lat2,x2,True,lon1,lat1,0) + x1_interp = x1 + + x22_interp = linint2_Wrap(lon2,lat2,x22,True,lon1,lat1,0) + x11_interp = x11 + use_nlat = nlat1 + use_nlon = nlon1 + end if + else + use_nlon = nlon1 + use_nlat = nlat1 + x1_interp = x1 + x2_interp = x2 + x11_interp = x11 + x22_interp = x22 + end if + +; interpolate obs to new grid if necessary + if (obsFlag .eq. 1) ; (1 = on; 0 = off) + if (nlat1 .ne. nlat3 .or. nlat2 .ne. nlat3) then + if (useCase1 .eq. True) then + x3_interp = linint2_Wrap(lon3,lat3,x3,True,lon1,lat1,0) ; interpolate x3 to x1 grid + end if + if (useCase2 .eq. True) then + x3_interp = linint2_Wrap(lon3,lat3,x3,True,lon2,lat2,0) ; interpolate x3 to x2 grid + end if + else + x3_interp = x3 + end if +; different plots for observations +; plot = new(9,graphic) + else +; x1_interp = x1 ; fix, per Keith Oleson +; plot = new(4,graphic) + end if ; end observations + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then + plot = new(9,graphic) + else + plot = new(4,graphic) + end if + + if (x1@units .eq. "K") then + useUnits = "C" ; assume same units for x1 and x2 + else + useUnits = x1@units ; assume same units for x1 and x2 + end if +;******************************************************************* +; 3D VARIABLES +;******************************************************************* + if (rank1.eq.3)then + +; calculate time average + var_avg_1 = dim_avg_n_Wrap(x1,0) ; time average + var_avg_2 = dim_avg_n_Wrap(x2,0) ; time average + var_avg_1_intp = dim_avg_n_Wrap(x1_interp,0) ; time average + var_avg_2_intp = dim_avg_n_Wrap(x2_interp,0) ; time average + + if (obsFlag .eq. 
1) then ; (1 = obs; 0 = no obs) + x3_interp = mask(x3_interp, (x1_interp(0,:,:) .eq. x1_interp@_FillValue), False) ; remove non-land points + var_avg_3 = x3_interp + var_avg_3_intp = x3_interp + min3 = min(var_avg_3) + max3 = max(var_avg_3) + delete(x3_interp) + end if + + min1 = min(var_avg_1) + min2 = min(var_avg_2) + max1 = max(var_avg_1) + max2 = max(var_avg_2) + + if (min1 .le. min2) then + if (isvar("min12")) then + delete(min12) + end if + min12 = min1 + else + if (isvar("min12")) then + delete(min12) + end if + min12 = min2 + end if + + if (max1 .ge. max2) then + if (isvar("max12")) then + delete(max12) + end if + max12 = max1 + else + if(isvar("max12")) then + delete(max12) + end if + max12 = max2 + end if + +;************************************************************************** +; Set contour Levels: Dynamic vs pre-defined contours +;************************************************************************** + +; dynContour: 0=dynamic;1=explicit + if (info@cn_Explicit .eq. True .and. dynContour(i) .eq. 1 .and. user_defined .eq. 1) then + expFlag = True + else + expFlag = False + end if + + res = True + res = set11ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + if (expFlag .eq. True) then ; explicitly defined contours. + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_Levels ; contour levels + else + if (c13Flag(i) .eq. "C13_") then + useMin12 = -40 ; hardcode boundaries to expectations + useMax12 = 0 ; hardcode boundaries to expectations + else + x = ndtooned(var_avg_1) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin12 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax12 = x(iHigh) + delete(x) + end if + maxLevel = 9 + if (vars(i) .eq. "SNOWDP") then + useMin12 = 0. + useMax12 = 1. + end if + if (vars(i) .eq. "H2OSNO") then + useMin12 = 0. 
+ useMax12 = 500. + end if + if (vars(i) .eq. "SNOWICE") then + useMin12 = 0. + useMax12 = 500. + end if + if (vars(i) .eq. "TWS") then + useMin12 = 4000. + useMax12 = 10000. + end if + mnmxint = nice_mnmxintvl( useMin12, useMax12, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; explicit contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + + delete(useMin12) + delete(useMax12) + end if + +; open postscript file and choose colormap + if (isvar("wks")) then + delete(wks) + end if + if (vars(i) .eq. "SNOWDP") then + if (snowFlag .eq. 1) then +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) +;; print((/"Fullname: "+wkdir + "set11_" + seasons(n)+"_"+vars(i)+"_FOSTERDAVY"/)) + fullname = wkdir+"set11_"+seasons(n)+"_"+vars(i)+"_FOSTERDAVY" +;; print((/"Fullname: "+fullname/)) +;; wks = gsn_open_wks(plot_type,wkdir + "set11_" + seasons(n)+"_"+vars(i)+"_FOSTERDAVY") + wks = gsn_open_wks(plot_type,fullname) + else +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) +;; print((/"Fullname: "+wkdir + "set11_" + seasons(n)+"_"+vars(i)+"_CMC"/)) + fullname = wkdir + "set11_" + seasons(n)+"_"+vars(i)+"_CMC" +;; print((/"Fullname: "+fullname/)) +;; wks = gsn_open_wks(plot_type,wkdir + "set11_" + seasons(n)+"_"+vars(i)+"_CMC") + wks = gsn_open_wks(plot_type,fullname) + end if + else +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) + fullname = wkdir + "set11_" + seasons(n)+"_"+vars(i) +;; print((/"Fullname: "+fullname/)) +;; print((/"Fullname: "+wkdir + "set11_" + seasons(n)+"_"+vars(i)/)) +;; wks = gsn_open_wks(plot_type,wkdir + "set11_" + seasons(n)+"_"+vars(i)) + wks = gsn_open_wks(plot_type,fullname) + end if + if (colormap.eq.0) 
then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap + gsn_define_colormap(wks,cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) + res@cnFillPalette = cmap1(13:240,:) + end if + + res@tiMainString = cases(0) ; set case 1 titles + res@gsnCenterString = "(yrs " + yrs_ave1 + ")" + min1 = min(var_avg_1) + max1 = max(var_avg_1) + + res@gsnLeftString = "" ; vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + delete(min1) + delete(max1) + + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + + ctr = 0 + plot(ctr) = gsn_csm_contour_map(wks,var_avg_1,res) ; case 1 plot + ctr = ctr+1 + +; Note: Don't delete res here - save until after obs are posted. 
+ res@tiMainString = cases(1) ; set case 2 titles + res@gsnCenterString = " (yrs " + yrs_ave2 + ")" + if (vars(i) .eq. "CH4PROD" .or. vars(i) .eq. "CH4_SURF_EBUL_SAT" .or. vars(i) .eq. "CH4_SURF_EBUL_UNSAT" .or. vars(i) .eq. "CPOOL") then + if (isatt(res,"cnLabelBarEndStyle")) then + if (res@cnLabelBarEndStyle.eq."IncludeMinMaxLabels") then + res@cnLabelBarEndStyle = "IncludeOuterBoxes" ; temporarily turn off minmax labels. + end if + end if + else + if (isatt(res,"cnLabelBarEndStyle")) then + res@cnLabelBarEndStyle = "IncludeMinMaxLabels" + end if + end if + plot(ctr) = gsn_csm_contour_map(wks,var_avg_2,res) ; case 2 plot + ctr = ctr+1 + +; Note: Don't delete res here - save until after obs are posted. + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then ; OBS plot (if present) + res = True + res = set11ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + res@tiMainString = cases(2) + res@gsnCenterString = " " + delete(min3) + delete(max3) + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + plot(ctr) = gsn_csm_contour_map(wks,var_avg_3,res) + ctr = ctr+1 + end if + delete(res) ; delete res in time for difference plots. + +; difference plots + if (isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag12.rgb") ; read in colormap (increase color resolution for difference maps) + gsn_define_colormap(wks,cmap) + delete(cmap) + end if + + if (obsFlag .eq. 1 .and. plotObs .eq. 
1) then ; OBS plot (if present) + res = True + res = set11ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + res@cnFillPalette = cmap1(13:240,:) + + +; Case 1 - Obs + diff = var_avg_1_intp ; trick to maintain meta data + check = isdouble(var_avg_3_intp) + if (check .eq. True) then + diff = var_avg_1_intp - doubletofloat(var_avg_3_intp) + else + diff = var_avg_1_intp - var_avg_3_intp + end if + delete(check) + mindiff = min(diff) + maxdiff = max(diff) + res@tiMainString = cases(0) + res@gsnCenterString = "- Observations" +; set contour levels ( use larger contour intervals for Model vs Obs ) + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiffObs ; contour levels + + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(diff) + delete(mindiff) + delete(maxdiff) + ctr = ctr+1 + +; Case 2 - Obs + diff = var_avg_1_intp ; trick to maintain meta data + diff = var_avg_2_intp - doubletofloat(var_avg_3_intp) + res@tiMainString = cases(1) + res@gsnCenterString = "- Observations" + mindiff = min(diff) + maxdiff = max(diff) + + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiffObs ; contour levels + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(res) + delete(diff) + 
delete(mindiff) + delete(maxdiff) + ctr = ctr+2 + end if + +; Case 1 - Case 2 + diff = var_avg_1_intp ; trick to maintain meta data + diff = var_avg_1_intp - var_avg_2_intp + mindiff = min(diff) + maxdiff = max(diff) + +; set contour levels + res = True + res = set11ResMvM(res) + res@cnFillPalette = cmap1(13:240,:) + + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + if (expFlag .eq. False) then + sd = dim_stddev(ndtooned(diff)) + + if (vars(i).eq."PREC") then + useMin1 = -2.*sd ; = mindiff + useMax1 = 2.*sd ; = maxdiff + else + useMin1 = -2.5*sd ; = mindiff + useMax1 = 2.5*sd ; = maxdiff + end if + + if (c13Flag(i) .eq. "C13_") then + useMin1 = -40 ; hardcode boundaries to expectations + useMax1 = 0 ; hardcode boundaries to expectations + end if + if (mindiff.eq.maxdiff) then ; two cases are identical + useMin1 = -6 + useMax1 = 6 + diff = 0. + if (isatt(res,"cnLabelBarEndStyle")) then + if (res@cnLabelBarEndStyle.eq."IncludeMinMaxLabels") then + res@cnLabelBarEndStyle = "IncludeOuterBoxes" ; temporarily turn off minmax labels. + end if + end if + end if + maxLevel = 13 + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete([/useMin1,useMax1/]) + else + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiff ; contour levels + end if + + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 
1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + res@tiMainString = cases(0) + res@gsnCenterString= "- "+cases(1) + + res@gsnLeftString = "" ; vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ; useUnits ; assume same units for x1 and x2 + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(res) + ctr = ctr+1 + delete([/diff,mindiff,maxdiff/]) + + res = True ; T-Test plots + res = set11ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + var_variance_1 = dim_variance_n_Wrap(x11_interp,0) ; time average + var_variance_2 = dim_variance_n_Wrap(x22_interp,0) ; time average + +; set variances < 0 to missing so don't divide by zero + if (any(var_variance_1 .le. 0)) then + var_variance_1 = mask(var_variance_1,(var_variance_1 .gt. 0),True) + end if + if (any(var_variance_2 .le. 0)) then + var_variance_2 = mask(var_variance_2,(var_variance_2 .gt. 
0),True) + end if +; calculate t-test + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then + + pfinal = new((/use_nlat,use_nlon/),"float") + + diff13 = abs(var_avg_1_intp - doubletofloat(var_avg_3_intp)) + diff23 = abs(var_avg_2_intp - doubletofloat(var_avg_3_intp)) + delete(var_avg_3_intp) + copy_VarCoords(prob,diff13) + copy_VarCoords(prob,diff23) + + p1d = ndtooned(prob) + d131d = ndtooned(diff13) + d231d = ndtooned(diff23) + + do c=0,dimsizes(p1d)-1 + if (.not. ismissing(d131d(c)) .and. .not. ismissing(d231d(c) )) then + if (.not. ismissing(p1d(c))) then + if (d131d(c) .gt. d231d(c) ) then ; case 2 is closer to obs + if (p1d(c) .lt. sig_lvl ) then + p1d(c) = 10 ; blue + end if + else ; case 1 is closer to obs + if (p1d(c) .lt. sig_lvl ) then + p1d(c) = 5 ; red + end if + end if + end if + end if + end do + + pfinal = onedtond(p1d,dimsizes(prob)) + copy_VarCoords(prob,pfinal) + + res@gsnCenterString = "" + res@gsnRightString = "" + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.3 + res@tiMainString = cases(0)+" (green)" + res@gsnCenterString = cases(1)+" (red)" + ; res@tiMainString = "Case1+ (green) and Case2+ (red) relative to obs" + res@gsnLeftString = "Model relative to Obs" +; res@gsnSpreadColors = False + res@cnFillMode = "RasterFill" + res@cnLevelSelectionMode = "ExplicitLevels" + ; nanr 12/01/07 + if (isvar("cnLevels")) then + delete(res@cnLevels) + end if + res@cnLevels = (/2,7/) ; user defined sig level to view + res@cnFillColors = (/"white","green","red"/) ; white=NoChange; green=Case1 better; red=Case2 better +; res@cnFillColors = (/0,11,11/) ; only have red squares + + + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + 
res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + + plot(ctr) = gsn_csm_contour_map(wks, pfinal,res) ; will be sig test + delete([/res,pfinal,p1d,d131d,d231d,diff13,diff23/]) + ctr = ctr+1 + end if + + res = True ; t-test plot (3D vars) + res = set11ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + +; res@gsnSpreadColors = False + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = sig_lvl ; user defined sig level to view + res@cnFillColors = (/"red","white"/) ; only have red squares + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.245 + res@tiMainString = "T-Test of two Case means at each grid point" + res@gsnLeftString = "Cells are significant at " + sig_lvl +" level" + res@gsnRightString = "" + res@gsnCenterString = "" + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 
1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + res@tiMainOffsetYF = 0 + + plot(ctr) = gsn_csm_contour_map(wks,prob,res) ; will be sig test + delete(res) + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then ; panel plot + nrows = 3 + ncols = 3 + else + nrows = 2 + ncols = 2 + end if + pres@txString = seasons(n)+" "+vars(i)+" ("+useUnits+")" ; " + gsn_panel(wks,plot,(/nrows,ncols/),pres) ; draw panel plot + else +;************************************************************************* +; 4D VARIABLES +;************************************************************************* +; calculate 4D average + var_avg_1 = dim_avg_n_Wrap(x1(:,:nlev1-1,:,:),0) ; time average + var_avg_1_intp = dim_avg_n_Wrap(x1_interp(:,:nlev1-1,:,:),0) ; time average + var_avg_2 = dim_avg_n_Wrap(x2(:,:nlev1-1,:,:),0) ; time average + var_avg_2_intp = dim_avg_n_Wrap(x2_interp(:,:nlev1-1,:,:),0) ; time average + + if (isdim(x1_interp,"levlak") )then + nlev1 = nlevlak1 + end if + if (isdim(x2_interp,"levlak") )then + nlev2 = nlevlak2 + end if + + if (nlev1 .ne. nlev2) then + print("Set 2 Failed: number of soil levels is unequal: " + nlev1 + " vs " + nlev2) + end if + + karr = (/0,4,9/) + do lev=0,2 ; assume both cases have same number of levels. + k = karr(lev) + res = True + res = set11ResMvM(res) + if (paleo .eq. 
"True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if +; open postscript file (4D) + if (isvar("wks")) then + delete(wks) + end if +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) +;; print((/"Fullname: "+wkdir + "set11_" + seasons(n)+"_"+vars(i)+"_"+k/)) + wks = gsn_open_wks(plot_type,wkdir + "set11_" + seasons(n)+"_"+vars(i)+"_"+k) + if (isvar("cmap")) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap + gsn_define_colormap(wks,cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) + res@cnFillPalette = cmap1(13:240,:) + end if + + min1 = min(var_avg_1(k,:,:)) + max1 = max(var_avg_1(k,:,:)) + + if (expFlag .eq. True) then ; set explicit contour levels (4D) + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + st = "cn_Level"+k ; get cn levels for a specific levsoi + res@cnLevels = info@$st$ + else + maxLevel = 9 + x = ndtooned(var_avg_1(k,:,:)) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin1 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax1 = x(iHigh) + + delete(x) + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + + res@pmLabelBarHeightF = 0.085 + res@pmLabelBarWidthF = 0.4 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + 
res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = -.01 + res@tiMainString = cases(0) + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + res@gsnCenterString = "(yrs " + yrs_ave1 +")" + + plot(0) = gsn_csm_contour_map(wks,var_avg_1(k,:,:),res) ; 4D case 1 plot + +; set case 2 titles (4D) +; Note: Don't delete res here - save until after obs are posted. + + delete(min1) + delete(max1) + + res@tiMainString = cases(1) + res@gsnCenterString = "(yrs " + yrs_ave2 +")" + plot(1) = gsn_csm_contour_map(wks,var_avg_2(k,:,:),res) ; 4D case 2 plot + delete(res) + + res = True + res = set11ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; diff plot (4D) + if (isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag12.rgb") ; read in colormap ( increase color resolution for difference maps) + gsn_define_colormap(wks,cmap) + delete(cmap) + end if + if (colormap.eq.1) then + res@cnFillPalette = cmap1(13:240,:) + end if + + diff = var_avg_1_intp ; trick to maintain meta data + diff(k,:,:) = var_avg_1_intp(k,:,:) - var_avg_2_intp(k,:,:) + res@tiMainString = "Case 1 - Case 2" + mindiff = min(diff(k,:,:)) + maxdiff = max(diff(k,:,:)) + + if (expFlag .eq. True) then + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + st = "cn_Level"+k+"Diff" ; get cn levels for a specific levsoi + res@cnLevels = info@$st$ + else + sd = dim_stddev(ndtooned(diff)) + + if (vars(i).eq."PREC") then + useMin1 = -2.*sd ; = mindiff + useMax1 = 2.*sd ; = maxdiff + else + useMin1 = -2.5*sd ; = mindiff + useMax1 = 2.5*sd ; = maxdiff + end if + if (useMax1 .gt. maxdiff) then + useMin1 = -sd/5. + useMax1 = sd/5. 
+ end if + maxLevel = 13 + + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + res@pmLabelBarHeightF = 0.085 + res@pmLabelBarWidthF = 0.4 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = -.01 + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + res@tiMainString = cases(0) + res@gsnCenterString = "- "+cases(1) + plot(2) = gsn_csm_contour_map(wks,diff(k,:,:),res) + delete(res) + delete(diff) + + res = True ; T-Test plot (4D) + res = set11ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + var_variance_1 = dim_variance_n_Wrap(x11_interp(:,:nlev1-1,:,:),0) + var_variance_2 = dim_variance_n_Wrap(x22_interp(:,:nlev2-1,:,:),0) + + if (any(var_variance_1 .le. 0)) then ;set variances < 0 to missing so don't divide by zero + var_variance_1 = mask(var_variance_1,(var_variance_1 .gt. 0),True) + end if + if (any(var_variance_2 .le. 0)) then + var_variance_2 = mask(var_variance_2,(var_variance_2 .gt. 
0),True) + end if + + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = sig_lvl ; user defined sig level to view + res@cnFillColors = (/"red","white"/) ; only have red squares + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.245 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = 0 + res@tiMainString = "T-Test of two Case means at each grid point" + res@gsnCenterString = " " + res@gsnRightString = " " + res@gsnLeftString = "Cells are significant at " + sig_lvl +" level" + plot(3) = gsn_csm_contour_map(wks,prob(k,:,:),res) ; will be sig test + delete(res) + pres@txString = seasons(n)+" "+vars(i)+" Level " + k+" ("+useUnits+")" ; " + gsn_panel(wks,plot,(/2,2/),pres) ; draw panel plot + end do ; level loop + end if ; 4d variables + delete([/x1,x11,x2,x1_interp,x2_interp,x11_interp,x22_interp,x22,prob,var_avg_1,var_avg_2/]) + if (isvar("var_avg_3") ) then + delete(var_avg_3) + end if + if (isvar("cmap1") ) then + delete(cmap1) + end if + delete(var_avg_1_intp) + delete(var_avg_2_intp) + delete(var_variance_1) + delete(var_variance_2) + delete(plot) + else + print("Variable " + vars(i) + " does not exist.") + end if ; plot_check loop + if (isvar("useUnits")) then + delete(useUnits) + end if + if (isvar("lon3")) then + delete(lon3) + end if + if (isvar("lat3")) then + delete(lat3) + end if + end do ; end variable loop + end do ; seasons loop +; remove error file if program completes successfully. 
+ filo = wkdir +"/set11_error_file" + system("/bin/rm -f " + filo) +end diff --git a/lnd_diag/model1-model2/set_11_seas.ncl b/lnd_diag/model1-model2/set_11_seas.ncl new file mode 100755 index 00000000..9a1dcdd0 --- /dev/null +++ b/lnd_diag/model1-model2/set_11_seas.ncl @@ -0,0 +1,1437 @@ +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$DIAG_SHARED/$VAR_MASTER" +load "$DIAG_SHARED/lnd_func.ncl" +load "$DIAG_SHARED/find_var_with_alts.ncl" +;************************************************ +; Modified by Sheri Mickelson - March 2013 +; The seasonal loop for set_2 was moved to the +; swift script to run the seasons in parallel. +;************************************************ +begin + print ("=========================================") + print ("Starting: set_2.ncl") + print ("Start Time: "+systemfunc("date") ) + print ("=========================================") + ptmpdir1 = getenv("PTMPDIR_1") + ptmpdir2 = getenv("PTMPDIR_2") + wkdir = getenv("WKDIR") + obsdata = getenv("OBS_DATA") + obs_res = getenv("OBS_RES") + paleo = getenv("paleo") + plotObs = getenv("plotObs") + useCommon1 = getenv("UseCommonName_1") + useCommon2 = getenv("UseCommonName_2") + raster = getenv("raster") + plot_type = getenv("PLOTTYPE") + land_mask = getenv("land_mask1") + user_defined = getenv("expContours") + cn = getenv("CN") + colormap = getenv("colormap") + projection=getenv("projection") + season = getenv("season") + + flandmask = stringtofloat(land_mask) + + nyrs1 = stringtointeger(getenv("clim_num_yrs_1")) + nyrs2 = stringtointeger(getenv("clim_num_yrs_2")) + +;************************************************************************** +; Cut off top and bottom percent to tighten contour intervals. 
12/06 nanr +;************************************************************************** + pCut = 5 + pLow = 0.05 + pHigh = 0.95 + +; the set lists contains two columns of information. This comes into +; NCL as one long string that we need to parse out. + tmp = stringtocharacter(asciiread(wkdir+"/master_set2.txt",-1,"string")) + nvar = dimsizes(tmp(:,0)) + scale_type = new(nvar,"string") + vars = new(nvar,"string") + c13Flag = new(nvar,"string") + dynContour = new(nvar,"string") + do i=0,nvar-1 + scale_type(i) = charactertostring(tmp(i,0:12)) + dynContour(i) = charactertostring(tmp(i,13)) + vars(i) = charactertostring(tmp(i,16:)) + c13Flag(i) = charactertostring(tmp(i,16:19)) ; derived variable prefix + end do + +;************************************************* +; get case names and create filenames to read in +;************************************************* + sig_lvl = stringtofloat(getenv("sig_lvl") ) + zoom = stringtointeger(getenv("reg_contour")); # (1 = SUB, 0 = GLOBAL) + if (zoom.eq.1)then + min_lat = stringtofloat(getenv("min_lat")) + max_lat = stringtofloat(getenv("min_lat")) + min_lon = stringtofloat(getenv("min_lon")) + max_lon = stringtofloat(getenv("min_lon")) + end if +; seasons = (/"DJF","JJA","MAM","SON","ANN"/) +;************************************************* +; common plot resources +;************************************************* + pres = True ; panel only resources + +; read in case strings + cases = new(3,string) + prefix = new(2,string) + do m = 0,1 + if (m .eq. 0) then + useFlag = useCommon1 + end if + if (m .eq. 1) then + useFlag = useCommon2 + end if + if (useFlag .eq. 
"True") then + name1 = "commonName_"+(m+1) + else + name1 = "caseid_"+(m+1) + end if + name2 = "prefix_"+(m+1) + cases(m) = getenv(name1) + prefix(m) = getenv(name2) + end do + +; loop over seasons + +; do n = 0,dimsizes(seasons)-1 + print("Processing season " + season) + in1 = addfile(ptmpdir1+"/"+prefix(0)+"_"+season+"_climo.nc","r") ; used for first 3 plots + in2 = addfile(ptmpdir2+"/"+prefix(1)+"_"+season+"_climo.nc","r") ; used for first 3 plots + in11 = addfile(ptmpdir1+"/"+prefix(0)+"_"+season+"_means.nc","r") + in22 = addfile(ptmpdir2+"/"+prefix(1)+"_"+season+"_means.nc","r") + ; get degrees of freedom for t test + if (isatt(in11,"num_yrs_averaged")) then + dof_1 = in11@num_yrs_averaged + else + dof_1 = getfilevardimsizes(in11, "time") + end if + if (isatt(in22,"num_yrs_averaged")) then + dof_2 = in22@num_yrs_averaged + else + dof_2 = getfilevardimsizes(in22, "time") + end if +; open observation files + if (obs_res .eq. "T85") then + ptr_wm = addfile(obsdata+"/T85_WILLMOTT_"+season+"_climo.nc","r") ; temp and precip + else + ptr_wm = addfile(obsdata+"/WILLMOTT_"+season+"_climo.nc","r") ; temp and precip + end if + if (obs_res .eq. 
"T85") then + ptr_grdc = addfile(obsdata+"/T85_GRDC_"+season+"_climo.nc","r") ; runoff + ptr_fd = addfile(obsdata+"/T85_FOSTERDAVY_"+season+"_climo.nc","r") ; snow depth + else + ptr_grdc = addfile(obsdata+"/GRDC_"+season+"_climo.nc","r") ; runoff + ptr_fd = addfile(obsdata+"/FOSTERDAVY_"+season+"_climo.nc","r") ; snow depth + end if + ptr_scf = addfile(obsdata+"/NOAA_AVHRR_SNOWF_"+season+"_climo.070502.nc","r") ; snow cover + ptr_swe = addfile(obsdata+"/CMC_SWE_"+season+"_climo.070502.nc","r") ; snow depth + ptr_sd = addfile(obsdata+"/CMC_SNOWD_"+season+"_climo.070503.nc","r") ; swe + ptr_alb = addfile(obsdata+"/T42_MODIS_"+season+"_climo.070523.nc","r") ; albedo + ptr_asa = addfile(obsdata+"/modisradweighted_"+season+"_071105.nc","r") ; albedo + ptr_lhf = addfile(obsdata+"/MR_LHF_0.9x1.25_"+season+"_climo.nc","r") ; latent heat flux + ptr_gpp = addfile(obsdata+"/MR_GPP_0.9x1.25_"+season+"_climo.nc","r") ; gross primary production + ptr_lai = addfile(obsdata+"/MODIS_LAI_"+season+"_climo.nc","r") ; leaf area index + +; extract years averaged for plotting + if (isatt(in1,"yrs_averaged")) then + yrs_ave1 = in1@yrs_averaged + end if + if (isatt(in2,"yrs_averaged")) then + yrs_ave2 = in2@yrs_averaged + end if + +; check to make sure levels are the same for both model cases + nlev1 = 0 + nlev2 = 0 + if (isfilevar(in1,"levsoi")) then + nlev1 = getfilevardimsizes(in1,"levsoi") + end if + if (isfilevar(in2,"levsoi")) then + nlev2 = getfilevardimsizes(in2,"levsoi") + end if + if (isfilevar(in1,"levgrnd")) then + nlev1 = getfilevardimsizes(in1,"levgrnd") + end if + if (isfilevar(in2,"levgrnd")) then + nlev2 = getfilevardimsizes(in2,"levgrnd") + end if + if (nlev1 .eq. 0 .or. nlev2 .eq. 
0) + print("FATAL ERROR Set2: nlev1 = 0") + exit + end if + + if (isfilevar(in1,"TLAKE") ) then + if (isfilevar(in1,"levlak") ) then + nlevlak1 = getfilevardimsizes(in1,"levlak") + end if + end if + if (isfilevar(in2,"TLAKE") ) then + if (isfilevar(in2,"levlak") ) then + nlevlak2 = getfilevardimsizes(in2,"levlak") + end if + end if + + lon1 = in1->lon + lon2 = in2->lon + lat1 = in1->lat + lat2 = in2->lat + nlon1 = dimsizes(lon1) + nlon2 = dimsizes(lon2) + nlat1 = dimsizes(lat1) + nlat2 = dimsizes(lat2) + time1 = in1->time + time2 = in2->time + +; extract landfrac if paleo file + if (paleo .eq. "True") then + flandmask = 0 + landfrac = in1->landfrac(:,:) + oro = new((/nlat1,nlon1/),integer) + oro = 1 + oro@_FillValue = 0 + oro = where(landfrac .gt. 0.0, 1,0) +; fname = wkdir+prefix(0) +; fname = wkdir+"basename" + fname = wkdir+getenv(name1) + paleo_outline(oro,lat1,lon1,flandmask, fname ) + res = True + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; truncate soil layers if needed + if (nlev1 .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix(0)+" nlev: " + nlev1 +"]") + nlev1 = 10 + end if + if (nlev2 .gt. 10) then + print("Truncating soil levels to 10 levels [caseid: "+prefix(1)+" nlev: " + nlev2 +"]") + nlev2 = 10 + end if + +; loop over variables + + snowFlag = 0 + do i=0,dimsizes(vars)-1 + + if (paleo .ne. "True") then + if (snowFlag .eq. 1) then ; run SNOWDP twice to see both obs datasets + i = i-1 + end if + if (vars(i) .eq. 
"SNOWDP") then + snowFlag = snowFlag + 1 + end if + end if + print("Processing variable " + vars(i)) + + plot_check_1 = True + plot_check_11 = True + plot_check_2 = True + plot_check_22 = True + + info = var_init(vars(i)) ; read in case 1 variable + filevar = find_var_with_alts(in1, vars(i), info) +; now filevar is the name of the variable in the file (either +; vars(i) or one of its alternate names); if this variable was +; not found in the file, then filevar will be "" + + plot_check = True + if (filevar .ne. "")then + if (isvar("x1")) then + delete(x1) + end if + if (isvar("x11")) then + delete(x11) + end if + if (c13Flag(i) .eq. "C13_") then + x1 = derive_del13C(vars(i),in1, scale_type(i),0,2) + x11 = derive_del13C(vars(i),in11,scale_type(i),0,2) + else + x1 = in1 ->$filevar$ + x11 = in11->$filevar$ + plot_check_1 = True + plot_check_11 = True + delete(filevar) + end if + else + if (info@derivedVariable .eq. True) then + x1 = derive_land(vars(i),in1) + x11 = derive_land(vars(i),in11) + else + print("variable "+ vars(i)+ " is not a defined variable.") + plot_check_1 = False + plot_check_11 = False + continue + end if + end if + + info = var_init(vars(i)) ; read in case 2 variable + filevar = find_var_with_alts(in2, vars(i), info) +; now filevar is the name of the variable in the file (either +; vars(i) or one of its alternate names); if this variable was +; not found in the file, then filevar will be "" + + plot_check = True + if (filevar .ne. "")then + if (isvar("x2")) then + delete(x2) + end if + if (isvar("x22")) then + delete(x22) + end if + if (c13Flag(i) .eq. "C13_") then + x2 = derive_del13C(vars(i),in2, scale_type(i),0,2) + x22 = derive_del13C(vars(i),in22,scale_type(i),0,2) + else + x2 = in2 ->$filevar$ + x22 = in22->$filevar$ + plot_check_2 = True + plot_check_22 = True + delete(filevar) + end if + else + if (info@derivedVariable .eq. 
True) then + x2 = derive_land(vars(i),in2) + x22 = derive_land(vars(i),in22) + else + print("variable "+ vars(i)+ " is not a defined variable.") + plot_check_2 = False + plot_check_22 = False + continue + end if + end if + + obsFlag = 0 ; read in observations (if present) (1 = on; 0 = off) + if (isvar("x3") ) then + delete(x3) + end if + if (paleo .ne. "True") then + if (vars(i) .eq. "TSA" ) then + x3 = ptr_wm->$vars(i)$ + x3 = x3 - 273.15 ; convert K to C + k2c = True + lon3 = ptr_wm->lon + lat3 = ptr_wm->lat + cases(2) = ptr_wm@case_id + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "PREC" ) then + x3 = ptr_wm->PREC + lon3 = ptr_wm->lon + lat3 = ptr_wm->lat + cases(2) = ptr_wm@case_id + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "TOTRUNOFF" ) then + x3 = ptr_grdc->RUNOFF + lon3 = ptr_grdc->lon + lat3 = ptr_grdc->lat + cases(2) = "GRDC" + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "SNOWDP" .and. snowFlag .eq. 1) then + x3 = ptr_fd->$vars(i)$ + lon3 = ptr_fd->lon + lat3 = ptr_fd->lat + cases(2) = "FOSTERDAVY" + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "FSNO" ) then ; fractional snow cover + x3 = ptr_scf->SCF + x3 = x3 * 0.01 ; convert from percent to 0-1 + lon3 = ptr_scf->lon + lat3 = ptr_scf->lat + cases(2) = "NOAA AVHRR (1967-2003)" + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "H2OSNO" ) then ; fractional snow cover + x3 = ptr_swe->SWE + lon3 = ptr_scf->lon + lat3 = ptr_scf->lat + cases(2) = "CMC (1980-1996)" + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "SNOWDP" .and. snowFlag .eq. 2) then + x3 = ptr_sd->SNOWD + lon3 = ptr_sd->lon + lat3 = ptr_sd->lat + cases(2) = "CMC (1980-1996)" + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "VBSA") then + x3 = ptr_alb->VBSA(0,:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_alb->lat + lon3 = ptr_alb->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. 
"NBSA") then + x3 = ptr_alb->NBSA(0,:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_alb->lat + lon3 = ptr_alb->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "VWSA") then + x3 = ptr_alb->VWSA(0,:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_alb->lat + lon3 = ptr_alb->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "NWSA") then + x3 = ptr_alb->NWSA(0,:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_alb->lat + lon3 = ptr_alb->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "ASA") then + x3 = ptr_asa->BRDALB(0,:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_asa->lat + lon3 = ptr_asa->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "LHEAT") then + x3 = ptr_lhf->LHF(:,:) + cases(2) = "FLUXNET (1982-2008)" + lat3 = ptr_lhf->lat + lon3 = ptr_lhf->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if ((vars(i) .eq. "FPSN" .and. cn .eq. 0) .or. vars(i) .eq. "GPP") then + x3 = ptr_gpp->GPP(:,:) + cases(2) = "FLUXNET (1982-2008)" + lat3 = ptr_gpp->lat + lon3 = ptr_gpp->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + if (vars(i) .eq. "TLAI") then + x3 = ptr_lai->TLAI(:,:) + cases(2) = "MODIS (2001-2003)" + lat3 = ptr_lai->lat + lon3 = ptr_lai->lon + x3@_FillValue = 1e30 + obsFlag = 1; + else + x3 = 0.0 + obsFlag = 0; + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if ; paleo + +; if (paleo .eq. "True") then +; obsFlag = 0 +; end if + + if (obsFlag .eq. 1) then + nlon3 = dimsizes(lon3) + nlat3 = dimsizes(lat3) + end if + + if (x1@units .eq. 
"K") then + x1 = x1 - 273.15 ; convert K to C + x2 = x2 - 273.15 ; convert K to C + end if + + ; catch NaNs + if (isvar("x1") ) then + x1 = catchNaN(x1) + end if + if (isvar("x11") ) then + x11 = catchNaN(x11) + end if + if (isvar("x2") ) then + x2 = catchNaN(x2) + end if + if (isvar("x22") ) then + x22 = catchNaN(x22) + end if + + if (all(ismissing(x1)))then + print("variable "+ vars(i)+ " contains all missing values.") + plot_check_1 = False + delete(x1) + end if + if (all(ismissing(x11)))then + print("variable "+ vars(i)+ " not found") + plot_check_11 = False + delete(x11) + end if + if (all(ismissing(x2)))then + print("variable "+ vars(i)+ " not found") + plot_check_2 = False + delete(x2) + end if + if (all(ismissing(x22)))then + print("variable "+ vars(i)+ " not found") + plot_check_22 = False + delete(x22) + end if + +; ---------------------- +; start plotting process if variables exist +; ---------------------- + if (plot_check_1 .eq. True .and. plot_check_11 .eq. True .and. plot_check_2 .eq. True .and. plot_check_22 .eq. True) then + x1 = scale_var(x1, scale_type(i),info) + x11 = scale_var(x11,scale_type(i),info) + x2 = scale_var(x2, scale_type(i),info) + x22 = scale_var(x22,scale_type(i),info) + +; flip longitudes to -180 to 180 if needed + if (min(x2&lon) .ge. 0 .and. min(x1&lon) .lt. 0) then + x2 = lonFlip(x2) + x22 = lonFlip(x22) + end if + if (min(x1&lon) .ge. 0 .and. min(x2&lon) .lt. 0) then + x1 = lonFlip(x1) + x11 = lonFlip(x11) + end if + +; process observations if present. Case 1 as root. +; flip x3 longitudes to -180 to 180 if needed + if (obsFlag .eq. 1) ; (1 = on; 0 = off) + if (min(x1&lon) .lt. 0 .and. min(x3&lon) .ge. 0) then + if (lon3(0) .ge. 0.) then + x3 = lonFlip(x3) + end if + end if + end if + + rank1 = dimsizes(dimsizes(x1)) ; calculate size + rank2 = dimsizes(dimsizes(x2)) + + useCase1 = True + useCase2 = False + if (nlat1 .ne. nlat2) then + if (nlat1 .gt. 
nlat2) then ; interpolate x1 to x2 grid + x1_interp = linint2_Wrap(lon1,lat1,x1,True,lon2,lat2,0) + x2_interp = x2 + x11_interp = linint2_Wrap(lon1,lat1,x11,True,lon2,lat2,0) + x22_interp = x22 + useCase1 = False + useCase2 = True + use_nlat = nlat2 + use_nlon = nlon2 + else + x2_interp = linint2_Wrap(lon2,lat2,x2,True,lon1,lat1,0) + x1_interp = x1 + + x22_interp = linint2_Wrap(lon2,lat2,x22,True,lon1,lat1,0) + x11_interp = x11 + use_nlat = nlat1 + use_nlon = nlon1 + end if + else + use_nlon = nlon1 + use_nlat = nlat1 + x1_interp = x1 + x2_interp = x2 + x11_interp = x11 + x22_interp = x22 + end if + +; interpolate obs to new grid if necessary + if (obsFlag .eq. 1) ; (1 = on; 0 = off) + if (nlat1 .ne. nlat3 .or. nlat2 .ne. nlat3) then + if (useCase1 .eq. True) then + x3_interp = linint2_Wrap(lon3,lat3,x3,True,lon1,lat1,0) ; interpolate x3 to x1 grid + end if + if (useCase2 .eq. True) then + x3_interp = linint2_Wrap(lon3,lat3,x3,True,lon2,lat2,0) ; interpolate x3 to x2 grid + end if + else + x3_interp = x3 + end if +; different plots for observations +; plot = new(9,graphic) + else +; x1_interp = x1 ; fix, per Keith Oleson +; plot = new(4,graphic) + end if ; end observations + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then + plot = new(9,graphic) + else + plot = new(4,graphic) + end if + + if (x1@units .eq. "K") then + useUnits = "C" ; assume same units for x1 and x2 + else + useUnits = x1@units ; assume same units for x1 and x2 + end if +;******************************************************************* +; 3D VARIABLES +;******************************************************************* + if (rank1.eq.3)then + +; calculate time average + var_avg_1 = dim_avg_n_Wrap(x1,0) ; time average + var_avg_2 = dim_avg_n_Wrap(x2,0) ; time average + var_avg_1_intp = dim_avg_n_Wrap(x1_interp,0) ; time average + var_avg_2_intp = dim_avg_n_Wrap(x2_interp,0) ; time average + + if (obsFlag .eq. 1) then ; (1 = obs; 0 = no obs) + x3_interp = mask(x3_interp, (x1_interp(0,:,:) .eq. 
x1_interp@_FillValue), False) ; remove non-land points + var_avg_3 = x3_interp + var_avg_3_intp = x3_interp + min3 = min(var_avg_3) + max3 = max(var_avg_3) + delete(x3_interp) + end if + + min1 = min(var_avg_1) + min2 = min(var_avg_2) + max1 = max(var_avg_1) + max2 = max(var_avg_2) + + if (min1 .le. min2) then + if (isvar("min12")) then + delete(min12) + end if + min12 = min1 + else + if (isvar("min12")) then + delete(min12) + end if + min12 = min2 + end if + + if (max1 .ge. max2) then + if (isvar("max12")) then + delete(max12) + end if + max12 = max1 + else + if(isvar("max12")) then + delete(max12) + end if + max12 = max2 + end if + +;************************************************************************** +; Set contour Levels: Dynamic vs pre-defined contours +;************************************************************************** + +; dynContour: 0=dynamic;1=explicit + if (info@cn_Explicit .eq. True .and. dynContour(i) .eq. 1 .and. user_defined .eq. 1) then + expFlag = True + else + expFlag = False + end if + + res = True + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + if (expFlag .eq. True) then ; explicitly defined contours. + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_Levels ; contour levels + else + if (c13Flag(i) .eq. "C13_") then + useMin12 = -40 ; hardcode boundaries to expectations + useMax12 = 0 ; hardcode boundaries to expectations + else + x = ndtooned(var_avg_1) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin12 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax12 = x(iHigh) + delete(x) + end if + maxLevel = 9 + + if (vars(i) .eq. "SNOWDP") then + useMin12 = 0. + useMax12 = 1. + end if + if (vars(i) .eq. "H2OSNO") then + useMin12 = 0. + useMax12 = 500. + end if + if (vars(i) .eq. "SNOWICE") then + useMin12 = 0. + useMax12 = 500. 
+ end if + if (vars(i) .eq. "TWS") then + useMin12 = 4000. + useMax12 = 10000. + end if + + mnmxint = nice_mnmxintvl( useMin12, useMax12, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; explicit contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + + delete(useMin12) + delete(useMax12) + end if + +; open postscript file and choose colormap + if (isvar("wks")) then + delete(wks) + end if + if (vars(i) .eq. "SNOWDP") then + if (snowFlag .eq. 1) then + wks = gsn_open_wks(plot_type,wkdir + "set2_" + season+"_"+vars(i)+"_FOSTERDAVY") + else + wks = gsn_open_wks(plot_type,wkdir + "set2_" + season+"_"+vars(i)+"_CMC") + end if + else + wks = gsn_open_wks(plot_type,wkdir + "set2_" + season+"_"+vars(i)) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap + gsn_define_colormap(wks,cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) + res@cnFillPalette = cmap1(13:240,:) + end if + + res@tiMainString = cases(0) ; set case 1 titles + res@gsnCenterString = "(yrs " + yrs_ave1 + ")" + min1 = min(var_avg_1) + max1 = max(var_avg_1) + + res@gsnLeftString = "" ; vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + delete(min1) + delete(max1) + + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 
1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + + ctr = 0 + plot(ctr) = gsn_csm_contour_map(wks,var_avg_1,res) ; case 1 plot + ctr = ctr+1 + +; Note: Don't delete res here - save until after obs are posted. + res@tiMainString = cases(1) ; set case 2 titles + res@gsnCenterString = " (yrs " + yrs_ave2 + ")" + if (vars(i) .eq. "CH4PROD" .or. vars(i) .eq. "CH4_SURF_EBUL_SAT" .or. vars(i) .eq. "CH4_SURF_EBUL_UNSAT") then + if (isatt(res,"cnLabelBarEndStyle")) then + if (res@cnLabelBarEndStyle.eq."IncludeMinMaxLabels") then + res@cnLabelBarEndStyle = "IncludeOuterBoxes" ; temporarily turn off minmax labels. + end if + end if + else + if (isatt(res,"cnLabelBarEndStyle")) then + res@cnLabelBarEndStyle = "IncludeMinMaxLabels" + end if + end if + plot(ctr) = gsn_csm_contour_map(wks,var_avg_2,res) ; case 2 plot + ctr = ctr+1 + +; Note: Don't delete res here - save until after obs are posted. + if (obsFlag .eq. 1 .and. plotObs .eq. 
1) then ; OBS plot (if present) + res = True + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + res@tiMainString = cases(2) + res@gsnCenterString = " " + delete(min3) + delete(max3) + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + plot(ctr) = gsn_csm_contour_map(wks,var_avg_3,res) + ctr = ctr+1 + end if + delete(res) ; delete res in time for difference plots. + +; difference plots + if (isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag12.rgb") ; read in colormap (increase color resolution for difference maps) + gsn_define_colormap(wks,cmap) + delete(cmap) + end if + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then ; OBS plot (if present) + res = True + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + res@cnFillPalette = cmap1(13:240,:) + + +; Case 1 - Obs + diff = var_avg_1_intp ; trick to maintain meta data + check = isdouble(var_avg_3_intp) + if (check .eq. 
True) then + diff = var_avg_1_intp - doubletofloat(var_avg_3_intp) + else + diff = var_avg_1_intp - var_avg_3_intp + end if + delete(check) + mindiff = min(diff) + maxdiff = max(diff) + res@tiMainString = cases(0) + res@gsnCenterString = "- Observations" +; set contour levels ( use larger contour intervals for Model vs Obs ) + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiffObs ; contour levels + + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(diff) + delete(mindiff) + delete(maxdiff) + ctr = ctr+1 + +; Case 2 - Obs + diff = var_avg_1_intp ; trick to maintain meta data + diff = var_avg_2_intp - doubletofloat(var_avg_3_intp) + res@tiMainString = cases(1) + res@gsnCenterString = "- Observations" + mindiff = min(diff) + maxdiff = max(diff) + + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiffObs ; contour levels + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(res) + delete(diff) + delete(mindiff) + delete(maxdiff) + ctr = ctr+2 + end if + +; Case 1 - Case 2 + diff = var_avg_1_intp ; trick to maintain meta data + diff = var_avg_1_intp - var_avg_2_intp + mindiff = min(diff) + maxdiff = max(diff) + +; set contour levels + res = True + res = set2ResMvM(res) + res@cnFillPalette = cmap1(13:240,:) + + if (paleo .eq. 
"True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + if (expFlag .eq. False) then + sd = dim_stddev(ndtooned(diff)) + + if (vars(i).eq."PREC") then + useMin1 = -2.*sd ; = mindiff + useMax1 = 2.*sd ; = maxdiff + else + useMin1 = -2.5*sd ; = mindiff + useMax1 = 2.5*sd ; = maxdiff + end if + + if (c13Flag(i) .eq. "C13_") then + useMin1 = -40 ; hardcode boundaries to expectations + useMax1 = 0 ; hardcode boundaries to expectations + end if + if (mindiff.eq.maxdiff) then ; two cases are identical + useMin1 = -6 + useMax1 = 6 + diff = 0. + if (isatt(res,"cnLabelBarEndStyle")) then + if (res@cnLabelBarEndStyle.eq."IncludeMinMaxLabels") then + res@cnLabelBarEndStyle = "IncludeOuterBoxes" ; temporarily turn off minmax labels. + end if + end if + end if + maxLevel = 13 + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete([/useMin1,useMax1/]) + else + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + res@cnLevels = info@cn_LevelsDiff ; contour levels + end if + + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 
1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + res@tiMainString = cases(0) + res@gsnCenterString= "- "+cases(1) + + res@gsnLeftString = "" ; vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ; useUnits ; assume same units for x1 and x2 + plot(ctr) = gsn_csm_contour_map(wks,diff,res) + delete(res) + ctr = ctr+1 + delete([/diff,mindiff,maxdiff/]) + + res = True ; T-Test plots + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + var_variance_1 = dim_variance_n_Wrap(x11_interp,0) ; time average + var_variance_2 = dim_variance_n_Wrap(x22_interp,0) ; time average + +; set variances < 0 to missing so don't divide by zero + if (any(var_variance_1 .le. 0)) then + var_variance_1 = mask(var_variance_1,(var_variance_1 .gt. 0),True) + end if + if (any(var_variance_2 .le. 0)) then + var_variance_2 = mask(var_variance_2,(var_variance_2 .gt. 0),True) + end if +; calculate t-test + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + + if (obsFlag .eq. 1 .and. plotObs .eq. 1) then + + pfinal = new((/use_nlat,use_nlon/),"float") + + diff13 = abs(var_avg_1_intp - doubletofloat(var_avg_3_intp)) + diff23 = abs(var_avg_2_intp - doubletofloat(var_avg_3_intp)) + delete(var_avg_3_intp) + copy_VarCoords(prob,diff13) + copy_VarCoords(prob,diff23) + + p1d = ndtooned(prob) + d131d = ndtooned(diff13) + d231d = ndtooned(diff23) + + do c=0,dimsizes(p1d)-1 + if (.not. 
ismissing(d131d(c)) .and. .not. ismissing(d231d(c) )) then + if (.not. ismissing(p1d(c))) then + if (d131d(c) .gt. d231d(c) ) then ; case 2 is closer to obs + if (p1d(c) .lt. sig_lvl ) then + p1d(c) = 10 ; blue + end if + else ; case 1 is closer to obs + if (p1d(c) .lt. sig_lvl ) then + p1d(c) = 5 ; red + end if + end if + end if + end if + end do + + pfinal = onedtond(p1d,dimsizes(prob)) + copy_VarCoords(prob,pfinal) + + res@gsnCenterString = "" + res@gsnRightString = "" + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.3 + res@tiMainString = cases(0)+" (green)" + res@gsnCenterString = cases(1)+" (red)" + ; res@tiMainString = "Case1+ (green) and Case2+ (red) relative to obs" + res@gsnLeftString = "Model relative to Obs" +; res@gsnSpreadColors = False + res@cnFillMode = "RasterFill" + res@cnLevelSelectionMode = "ExplicitLevels" + ; nanr 12/01/07 + if (isvar("cnLevels")) then + delete(res@cnLevels) + end if + res@cnLevels = (/2,7/) ; user defined sig level to view + res@cnFillColors = (/"white","green","red"/) ; white=NoChange; green=Case1 better; red=Case2 better +; res@cnFillColors = (/0,11,11/) ; only have red squares + + + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + end if + + plot(ctr) = gsn_csm_contour_map(wks, pfinal,res) ; will be sig test + delete([/res,pfinal,p1d,d131d,d231d,diff13,diff23/]) + ctr = ctr+1 + end if + + res = True ; t-test plot (3D vars) + res = set2ResMvM(res) + if (paleo .eq. 
"True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + +; res@gsnSpreadColors = False + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = sig_lvl ; user defined sig level to view + res@cnFillColors = (/"red","white"/) ; only have red squares + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.245 + res@tiMainString = "T-Test of two Case means at each grid point" + res@gsnLeftString = "Cells are significant at " + sig_lvl +" level" + res@gsnRightString = "" + res@gsnCenterString = "" + if (projection.eq.1) then ; projection = CE + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; set differing font heights/labelbar placement depending on whether there is 4 or 9 plots + res@gsnLeftStringFontHeightF = 0.026 ; 9 plots + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.024 ; 4 plots + res@gsnCenterStringFontHeightF = 0.028 + res@tiMainFontHeightF = 0.028 + res@lbLabelFontHeightF = 0.024 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + else + if (obsFlag.eq.1 .and. plotObs .eq. 1) then ; projection = Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + res@tiMainOffsetYF = -.0075 + else + res@gsnLeftStringFontHeightF = 0.016 + res@gsnCenterStringFontHeightF = 0.020 + res@tiMainFontHeightF = 0.020 + res@lbLabelFontHeightF = 0.016 + res@pmLabelBarOrthogonalPosF = 0.05 + res@tiMainOffsetYF = -.01 + end if + end if + res@tiMainOffsetYF = 0 + + plot(ctr) = gsn_csm_contour_map(wks,prob,res) ; will be sig test + delete(res) + + if (obsFlag .eq. 
1 .and. plotObs .eq. 1) then ; panel plot + nrows = 3 + ncols = 3 + else + nrows = 2 + ncols = 2 + end if + pres@txString = season+" "+vars(i)+" ("+useUnits+")" ; " + gsn_panel(wks,plot,(/nrows,ncols/),pres) ; draw panel plot + else +;************************************************************************* +; 4D VARIABLES +;************************************************************************* +; calculate 4D average + var_avg_1 = dim_avg_n_Wrap(x1(:,:nlev1-1,:,:),0) ; time average + var_avg_1_intp = dim_avg_n_Wrap(x1_interp(:,:nlev1-1,:,:),0) ; time average + var_avg_2 = dim_avg_n_Wrap(x2(:,:nlev1-1,:,:),0) ; time average + var_avg_2_intp = dim_avg_n_Wrap(x2_interp(:,:nlev1-1,:,:),0) ; time average + + if (isdim(x1_interp,"levlak") )then + nlev1 = nlevlak1 + end if + if (isdim(x2_interp,"levlak") )then + nlev2 = nlevlak2 + end if + + if (nlev1 .ne. nlev2) then + print("Set 2 Failed: number of soil levels is unequal: " + nlev1 + " vs " + nlev2) + end if + + karr = (/0,4,9/) + do lev=0,2 ; assume both cases have same number of levels. + k = karr(lev) + res = True + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if +; open postscript file (4D) + if (isvar("wks")) then + delete(wks) + end if + wks = gsn_open_wks(plot_type,wkdir + "set2_" + season+"_"+vars(i)+"_"+k) + if (isvar("cmap")) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap + gsn_define_colormap(wks,cmap) + end if + if (colormap.eq.1) then + gsn_define_colormap(wks,"ncl_default") + cmap1 = gsn_retrieve_colormap(wks) + res@cnFillPalette = cmap1(13:240,:) + end if + + min1 = min(var_avg_1(k,:,:)) + max1 = max(var_avg_1(k,:,:)) + + if (expFlag .eq. 
True) then ; set explicit contour levels (4D) + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + st = "cn_Level"+k ; get cn levels for a specific levsoi + res@cnLevels = info@$st$ + else + maxLevel = 9 + x = ndtooned(var_avg_1(k,:,:)) + nMsg = num(ismissing(x)) + nGood = num(.not.ismissing(x)) + qsort(x) + + iLow = floattointeger( pLow*nGood) + useMin1 = x(iLow ) + iHigh = floattointeger( pHigh*nGood) + useMax1 = x(iHigh) + + delete(x) + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + + res@pmLabelBarHeightF = 0.085 + res@pmLabelBarWidthF = 0.4 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = -.01 + res@tiMainString = cases(0) + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + res@gsnCenterString = "(yrs " + yrs_ave1 +")" + + plot(0) = gsn_csm_contour_map(wks,var_avg_1(k,:,:),res) ; 4D case 1 plot + +; set case 2 titles (4D) +; Note: Don't delete res here - save until after obs are posted. + + delete(min1) + delete(max1) + + res@tiMainString = cases(1) + res@gsnCenterString = "(yrs " + yrs_ave2 +")" + plot(1) = gsn_csm_contour_map(wks,var_avg_2(k,:,:),res) ; 4D case 2 plot + delete(res) + + res = True + res = set2ResMvM(res) + if (paleo .eq. 
"True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + +; diff plot (4D) + if (isvar("cmap") ) then + delete(cmap) + end if + if (colormap.eq.0) then + cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag12.rgb") ; read in colormap ( increase color resolution for difference maps) + gsn_define_colormap(wks,cmap) + delete(cmap) + end if + if (colormap.eq.1) then + res@cnFillPalette = cmap1(13:240,:) + end if + + diff = var_avg_1_intp ; trick to maintain meta data + diff(k,:,:) = var_avg_1_intp(k,:,:) - var_avg_2_intp(k,:,:) + res@tiMainString = "Case 1 - Case 2" + mindiff = min(diff(k,:,:)) + maxdiff = max(diff(k,:,:)) + + if (expFlag .eq. True) then + res@cnLevelSelectionMode = "ExplicitLevels" ; explicit contour levels + st = "cn_Level"+k+"Diff" ; get cn levels for a specific levsoi + res@cnLevels = info@$st$ + else + sd = dim_stddev(ndtooned(diff)) + + if (vars(i).eq."PREC") then + useMin1 = -2.*sd ; = mindiff + useMax1 = 2.*sd ; = maxdiff + else + useMin1 = -2.5*sd ; = mindiff + useMax1 = 2.5*sd ; = maxdiff + end if + if (useMax1 .gt. maxdiff) then + useMin1 = -sd/5. + useMax1 = sd/5. 
+ end if + maxLevel = 13 + + mnmxint = nice_mnmxintvl( useMin1, useMax1, maxLevel, False) + res@cnLevelSelectionMode = "ManualLevels" ; ncl defined contour levels + res@cnMinLevelValF = mnmxint(0) + res@cnMaxLevelValF = mnmxint(1) + res@cnLevelSpacingF = mnmxint(2) + delete(useMin1) + delete(useMax1) + end if + res@pmLabelBarHeightF = 0.085 + res@pmLabelBarWidthF = 0.4 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = -.01 + res@gsnLeftString = "" ;vars(i) ; could also use info@longName if we want + res@gsnRightString = "" ;useUnits ; assume same units for x1 and x2 + res@tiMainString = cases(0) + res@gsnCenterString = "- "+cases(1) + plot(2) = gsn_csm_contour_map(wks,diff(k,:,:),res) + delete(res) + delete(diff) + + res = True ; T-Test plot (4D) + res = set2ResMvM(res) + if (paleo .eq. "True") then + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + end if + + var_variance_1 = dim_variance_n_Wrap(x11_interp(:,:nlev1-1,:,:),0) + var_variance_2 = dim_variance_n_Wrap(x22_interp(:,:nlev2-1,:,:),0) + + if (any(var_variance_1 .le. 0)) then ;set variances < 0 to missing so don't divide by zero + var_variance_1 = mask(var_variance_1,(var_variance_1 .gt. 0),True) + end if + if (any(var_variance_2 .le. 0)) then + var_variance_2 = mask(var_variance_2,(var_variance_2 .gt. 
0),True) + end if + + prob = var_avg_1_intp ; trick to maintain meta data + prob = ttest(var_avg_1_intp,var_variance_1,dof_1,var_avg_2_intp,var_variance_2,dof_2,True,False) + + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = sig_lvl ; user defined sig level to view + res@cnFillColors = (/"red","white"/) ; only have red squares + res@lbLabelBarOn = False + res@gsnLeftStringParallelPosF = 0.245 + if (projection.eq.1) then ; CE + res@gsnLeftStringFontHeightF = 0.026 + res@gsnCenterStringFontHeightF = 0.030 + res@tiMainFontHeightF = 0.030 + res@lbLabelFontHeightF = 0.026 + res@pmLabelBarOrthogonalPosF = 0.075 + else ; Robinson + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@tiMainFontHeightF = 0.022 + res@lbLabelFontHeightF = 0.018 + res@pmLabelBarOrthogonalPosF = 0.075 + end if + res@tiMainOffsetYF = 0 + res@tiMainString = "T-Test of two Case means at each grid point" + res@gsnCenterString = " " + res@gsnRightString = " " + res@gsnLeftString = "Cells are significant at " + sig_lvl +" level" + plot(3) = gsn_csm_contour_map(wks,prob(k,:,:),res) ; will be sig test + delete(res) + pres@txString = season+" "+vars(i)+" Level " + k+" ("+useUnits+")" ; " + gsn_panel(wks,plot,(/2,2/),pres) ; draw panel plot + end do ; level loop + end if ; 4d variables + delete([/x1,x11,x2,x1_interp,x2_interp,x11_interp,x22_interp,x22,prob,var_avg_1,var_avg_2/]) + if (isvar("var_avg_3") ) then + delete(var_avg_3) + end if + if (isvar("cmap1") ) then + delete(cmap1) + end if + delete(var_avg_1_intp) + delete(var_avg_2_intp) + delete(var_variance_1) + delete(var_variance_2) + delete(plot) + else + print("Variable " + vars(i) + " does not exist.") + end if ; plot_check loop + if (isvar("useUnits")) then + delete(useUnits) + end if + if (isvar("lon3")) then + delete(lon3) + end if + if (isvar("lat3")) then + delete(lat3) + end if + end do ; end variable loop +; end do ; seasons loop +; remove error file if program completes successfully. 
+ filo = wkdir +"/set2_error_file" + system("/bin/rm -f " + filo) +end diff --git a/lnd_diag/model1-model2/set_12.ncl b/lnd_diag/model1-model2/set_12.ncl new file mode 100755 index 00000000..5c43155a --- /dev/null +++ b/lnd_diag/model1-model2/set_12.ncl @@ -0,0 +1,1513 @@ +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$DIAG_SHARED/$VAR_MASTER" +load "$DIAG_SHARED/lnd_func.ncl" +;************************************************ +begin + print ("=========================================") + print ("Starting: set_3.ncl") + print ("Start Time: "+systemfunc("date") ) + print ("=========================================") + resdir = getenv("DIAG_RESOURCES") + inputdir = getenv("INPUT_FILES") + diag_home = getenv("DIAG_HOME") + plot_type = getenv("PLOTTYPE") + paleo = getenv("paleo") + land_mask = getenv("land_mask1") + landmask = stringtofloat(land_mask) + cn = getenv("CN") + clamp = getenv("CLAMP") + casa = getenv("CASA") + hydroFlag = getenv("HYDRO") + obsdata = getenv("OBS_DATA") + debugFlag = getenv("debugFlag") + useCommon1 = getenv("UseCommonName_1") + useCommon2 = getenv("UseCommonName_2") + plot_type = getenv("PLOTTYPE") + +;************************************************* +; get case names and create filenames to read in +;************************************************* + ptmpdir1 = getenv("PTMPDIR_1") + ptmpdir2 = getenv("PTMPDIR_2") + wkdir = getenv("WKDIR") + +;*********************************************** +; import regional information +;*********************************************** + + secondsPerDay = 86400. 
+ months = 12 ; number of months + regions_file = addfile(resdir+"/region_definitions.nc","r") +; regions_file = addfile("/glade/u/home/dlawren/region_polar.nc","r") + box_n = regions_file->BOX_N + box_w = regions_file->BOX_W + box_e = regions_file->BOX_E + box_s = regions_file->BOX_S + nreg = dimsizes(box_s) ; number of regions + tmp = regions_file->PS_ID + ps_id = charactertostring(tmp) + delete(tmp) + tmp = regions_file->PTITSTR + ptitstr = charactertostring(tmp) + delete(tmp) + tmp = regions_file->BOXSTR + boxstr = charactertostring(tmp) + delete(tmp) + +;************************************************* +; main loop +;************************************************* + +nplots = 9 +nmons = 13 + +; loop over plots +do p=0,nplots-1 + +print("Now printing plot #" + p) + if (p .eq. 0) then + if (cn .eq. 1) then + fileName = "set3_cn_landFlx.txt" + else + fileName = "set3_landFlx.txt" + end if + plot_name = "landf" + titstr = new(7,"string") + titstr(0) = "2m Air Temperature (~S~o~N~K)" + titstr(1) = "Precipitation (mm day~S~-1~N~)" + titstr(2) = "Runoff (mm day~S~-1~N~)" + titstr(3) = "Snow Depth (m)" + titstr(4) = "Latent Heat Flux (W m~S~-2~N~)" + titstr(5) = "Photosynthesis (gC m~S~-2~N~day~S~-1~N~)" + titstr(6) = "Leaf Area Index" + end if + if (p .eq. 1) then + fileName = "set3_radFlx.txt" + plot_name = "radf" + titstr = new(7,"string") + titstr(0) = "Incoming Solar" + titstr(1) = "Albedo" + titstr(2) = "Absorbed Solar" + titstr(3) = "Incoming Longwave" + titstr(4) = "Emitted Longwave" + titstr(5) = "Net Longwave" + titstr(6) = "Net Radiation" + end if + if (p .eq. 
2) then + fileName = "set3_turbFlx.txt" + plot_name = "turbf" + titstr = new(10,"string") + titstr(0) = "Net Radiation" + titstr(1) = "Sensible Heat" + titstr(2) = "Latent Heat" + titstr(3) = "Transpiration" + titstr(4) = "Canopy Evaporation" + titstr(5) = "Ground Evaporation" + titstr(6) = "Ground Heat + Snow Melt" + titstr(7) = "Soil moisture factor (BTRAN)" + titstr(8) = "Evaporative Fraction" + titstr(9) = "Total LAI" + end if + if (p .eq. 3) then + if (clamp .eq. 1) then + fileName = "set3_clampFlx.txt" + plot_name = "cnFlx" + titstr = new(6,"string") + titstr(0) = "Net Ecosystem Exchange" + titstr(1) = "GPP" + titstr(2) = "NPP" + titstr(3) = "Autotrophic Respiration" + titstr(4) = "Heterotrophic Respiration" + titstr(5) = "Net Ecosystem Production" + else + if (cn .eq. 1) then + fileName = "set3_cnFlx.txt" + plot_name = "cnFlx" + titstr = new(10,"string") + titstr(0) = "Net Ecosystem Exchange" + titstr(1) = "GPP" + titstr(2) = "NPP" + titstr(3) = "Autotrophic Respiration" + titstr(4) = "Heterotrophic Respiration" + titstr(5) = "Ecosystem Respiration" + titstr(6) = "Surface CH4 Flux" + titstr(7) = "Leached Mineral Nitrogen" + titstr(8) = "Soil NO3 Pool Loss to Leaching" + titstr(9) = "Soil NO3 Pool Loss to Runoff" + else + if (casa .eq. 1) then + fileName = "set3_casaFlx.txt" + plot_name = "cnFlx" + titstr = new(5,"string") + titstr(0) = "Net Ecosystem Exchange" + titstr(1) = "GPP" + titstr(2) = "NPP" + titstr(3) = "Autotrophic Respiration" + titstr(4) = "Heterotrophic Respiration" + else + continue + end if + end if + end if + end if + if (p .eq. 4) then + if (cn .eq. 1) then + fileName = "set3_fireFlx.txt" + plot_name = "frFlx" + titstr = new(6,"string") + titstr(0) = "Column-Level Fire C Loss" + titstr(1) = "Column-Level Fire N Loss" + titstr(2) = "PFT-Level Fire C Loss" + titstr(3) = "PFT-Level Fire N Loss" + titstr(4) = "Fractional Area Burned" + titstr(5) = "Fractional Area Burned - Crop" + else + continue + end if + end if + if (p .eq. 
5) then + fileName = "set3_moistEnergyFlx.txt" + plot_name = "moistEnergyFlx" + titstr = new(3,"string") + titstr(0) = "Precipitation" + titstr(1) = "Net Radiation" + titstr(2) = "Evapotranspiration" + end if + if (p .eq. 6) then + if (hydroFlag .eq. 1) then + fileName = "set3_hydro.txt" + plot_name = "hydro" + titstr = new(5,"string") + titstr(0) = "Water in Unconfined Aquifer" ; WA + titstr(1) = "Water Table Depth" ; ZWT + titstr(2) = "Aquifer Recharge Rate" ; QCHARGE + titstr(3) = "Frac Water Table at Surface" ; FCOV + titstr(4) = "Total Water Storage" ; TWS + else + continue + end if + end if + if (p .eq. 7) then + fileName = "set3_snow.txt" + plot_name = "snow" + if (paleo .eq. "True") then + titstr = new(4,"string") + titstr(0) = "Snow height" ; SNOWDP + titstr(1) = "Fractional Snow Cover" ; FSNO + titstr(2) = "Snow Water Equivalent" ; H2OSNO + else + titstr = new(4,"string") + titstr(0) = "Snow height (USAF/CMC)" ; SNOWDP + titstr(1) = "Fractional Snow Cover (NOAA-AVHRR)"; FSNO + titstr(2) = "Snow Water Equivalent (CMC)" ; H2OSNO + end if + end if + if (p .eq. 
8) then + fileName = "set3_albedo.txt" + plot_name = "albedo" + titstr = new(5,"string") + titstr(0) = "Visible BlackSky Albedo" ; VBSA + titstr(1) = "NearIR BlackSky Albedo" ; NBSA + titstr(2) = "Visible WhiteSky Albedo" ; VWSA + titstr(3) = "NearIR WhiteSky Albedo" ; NWSA + titstr(4) = "All Sky Albedo" ; ASA + end if + +;---------------------------------------------------------- +; read in file and parse into variables +;---------------------------------------------------------- + tmp = stringtocharacter(asciiread(inputdir+"/"+fileName,-1,"string")) + nvars = dimsizes(tmp(:,0)) +print(" p = " + p + " nvars = " + nvars) + nplotsPerCol = (nvars+1)/2 + scale_type = new(nvars,"string") + vars = new(nvars,"string") + vcheck1 = new(nvars,"string") + vcheck2 = new(nvars,"string") + do i=0,nvars-1 + scale_type(i) = charactertostring(tmp(i,0:12)) + vars(i) = charactertostring(tmp(i,16:)) + end do + delete(tmp) + + plot_check = True ; 20dec04 - nanr; added so that plot_check would be defined for underived vars. + +; Define common plots resources -------- + plot1 = new(1,graphic) + if (p .eq. 
5) + plot = new(2,graphic) + else + plot = new(nvars,graphic) + end if + + res = True + res@xyLineColors = (/"red","blue","black","black"/) + res@xyDashPatterns = (/0.,0.,16.,2./) ; solid, dashed + res@xyLineThicknesses = (/2.,2.,2.,2./) + res@tmXBMinorOn = False + res@tiMainOn = False + res@gsnDraw = False + res@gsnFrame = False + res@tiXAxisFontHeightF = 0.030 + res@tiYAxisFontHeightF = 0.030 + res@txFontHeightF = 0.030 + res@tmYLLabelFontHeightF = 0.030 + res@tmYLFormat = "0@!;*?5g" + res@vpKeepAspect = False + res@vpWidthF = 1.2 + +; creating x-axis labels + month = ispan(1,months+1,1) + month!0 = "month" + res@trXMinF = 1 + res@trXMaxF = months+1 + res@tmXBMode = "Explicit" + res@tmXBValues = month + res@tmXBLabels = (/"J","F","M","A","M","J","J","A","S","O","N","D","J"/) + res@pmLegendDisplayMode = "Never" + res@vpHeightF = 0.4 + res@vpWidthF = 0.8 + + vv = ispan(0,nvars-1,1) + mm = ispan(0,months-1,1) + + + cases = new(5,"string") + prefix = new(2,"string") + if (useCommon1 .eq. "True") then + name1 = "commonName_1" + else + name1 = "caseid_1" + end if + if (useCommon2 .eq. 
"True") then + name2 = "commonName_2" + else + name2 = "caseid_2" + end if + name11 = "prefix_1" + name22 = "prefix_2" + cases(0) = getenv(name1) + cases(1) = getenv(name2) + prefix(0) = getenv(name11) + prefix(1) = getenv(name22) + in1 = addfile(ptmpdir1+"/"+prefix(0)+"MONS_climo.nc","r") + in2 = addfile(ptmpdir2+"/"+prefix(1)+"MONS_climo.nc","r") + if(isfilevar(in1,"levsoi")) then + nlev1 = getfilevardimsizes(in1,"levsoi") + end if + if(isfilevar(in1,"levgrnd")) then + nlev1 = getfilevardimsizes(in1,"levgrnd") + end if + fland1 = in1->landfrac + area1 = in1->area + time1 = in1->time + lat1 = in1->lat + lon1 = in1->lon + time = in1->time + nlat1 = dimsizes(lat1) + nlon1 = dimsizes(lon1) + + ; read case 2 + if(isfilevar(in2,"levsoi")) then + nlev2 = getfilevardimsizes(in2,"levsoi") + end if + if(isfilevar(in2,"levgrnd")) then + nlev2 = getfilevardimsizes(in2,"levgrnd") + end if + fland2 = in2->landfrac + area2 = in2->area + time2 = in2->time + lat2 = in2->lat + lon2 = in2->lon + nlat2 = dimsizes(lat2) + nlon2 = dimsizes(lon2) + + if (lon1(0) .ge. 0) then + area1 = lonFlip(area1) + fland1 = lonFlip(fland1) + end if + + if (lon2(0) .ge. 0) then + area2 = lonFlip(area2) + fland2 = lonFlip(fland2) + end if + + if (isatt(in1,"yrs_averaged")) then + yrs_ave1 = in1@yrs_averaged + cases(0) = cases(0)+" (yrs "+yrs_ave1+")" + end if + if (isatt(in2,"yrs_averaged")) then + yrs_ave2 = in2@yrs_averaged + cases(1) = cases(1)+" (yrs "+yrs_ave2+")" + end if + + +;****************************** +; Read observed data +;****************************** + + if (p .eq. 
0) then ; read observations for landf variables + ptr_wm = addfile(obsdata+"/0_5_WILLMOTT_ALLMONS_climo.nc","r") ; temp,precip + case3 = ptr_wm@case_id + lat3 = ptr_wm->lat + time3 = ptr_wm->month + nlat3 = dimsizes(lat3) + lon3 = ptr_wm->lon + nlon3 = dimsizes(lon3) + ptr_grdc = addfile(obsdata+"/0_5_GRDC_ALLMONS_climo.nc","r") ; runoff + case2_r = ptr_grdc@case_id + ptr_fd = addfile(obsdata+"/0_5_FOSTERDAVY_ALLMONS_climo.nc","r") ; snowdepth + case2_sd = ptr_fd@case_id + + landmask3 = new((/nvars,nlat3,nlon3/),"double") + area3 = new((/nvars,nlat3,nlon3/),"double") + + ptr_scf = addfile(obsdata+"/NOAA_AVHRR_SNOWF_ALLMONS_climo.100318.nc","r") ; snow cover + ptr_sd = addfile(obsdata+"/CMC_SNOWD_ALLMONS_climo.100318.nc","r") ; snow depth + ptr_swe = addfile(obsdata+"/CMC_SWE_ALLMONS_climo.100318.nc","r") ; swe + + lat4 = ptr_sd->lat + nlat4 = dimsizes(lat4) + lon4 = ptr_sd->lon + nlon4 = dimsizes(lon4) + + landmask4 = new((/nvars,nlat4,nlon4/),"double") + area4 = new((/nvars,nlat4,nlon4/),"double") + + ; 0.9x1.25 degree = landmask6 - lhf and gpp data is at 0.9x.125 degree + + ptr_lhf = addfile(obsdata+"/MR_LHF_0.9x1.25_ALLMONS_climo.nc","r") ; lhf + ptr_gpp = addfile(obsdata+"/MR_GPP_0.9x1.25_ALLMONS_climo.nc","r") ; gpp + + lat6 = ptr_lhf->lat + nlat6 = dimsizes(lat6) + lon6 = ptr_lhf->lon + nlon6 = dimsizes(lon6) + + landmask6 = new((/nvars,nlat6,nlon6/),"float") + area6 = new((/nvars,nlat6,nlon6/),"float") + + ptr_lai = addfile(obsdata+"/MODIS_LAI_ALLMONS_climo.nc","r") ; lai + + lat7 = ptr_lai->lat + nlat7 = dimsizes(lat7) + lon7 = ptr_lai->lon + nlon7 = dimsizes(lon7) + + landmask7 = new((/nvars,nlat7,nlon7/),"float") + area7 = new((/nvars,nlat7,nlon7/),"float") + + landmask3!0 = "var" + landmask3&var = vv + landmask3!1 = "lat" + landmask3&lat = lat3 + landmask3!2 = "lon" + landmask3&lon = lon3 + area3!0 = "var" + area3&var = vv + area3!1 = "lat" + area3&lat = lat3 + area3!2 = "lon" + area3&lon = lon3 + + landmask4!0 = "var" + landmask4&var = vv + landmask4!1 
= "lat" + landmask4!2 = "lon" + landmask4&lat = lat4 + landmask4&lon = lon4 + area4!0 = "var" + area4&var = vv + area4!1 = "lat" + area4&lat = lat4 + area4!2 = "lon" + area4&lon = lon4 + + landmask6!0 = "var" + landmask6&var = vv + landmask6!1 = "lat" + landmask6!2 = "lon" + landmask6&lat = lat6 + landmask6&lon = lon6 + area6!0 = "var" + area6&var = vv + area6!1 = "lat" + area6&lat = lat6 + area6!2 = "lon" + area6&lon = lon6 + + landmask7!0 = "var" + landmask7&var = vv + landmask7!1 = "lat" + landmask7!2 = "lon" + landmask7&lat = lat7 + landmask7&lon = lon7 + area7!0 = "var" + area7&var = vv + area7!1 = "lat" + area7&lat = lat7 + area7!2 = "lon" + area7&lon = lon7 + + end if + if (p .eq. 8) ; albedo + ; 64x128 degree = landmask4 - albedo data is at T42 + ptr_alb = addfile(obsdata+"/T42_MODIS_ALLMONS_climo.070523.nc","r") ; albedo + + ; 64x128 degree = landmask4 - albedo data is at T42 + ptr_asa = addfile(obsdata+"/modisradweighted.nc","r") ; ASA albedo + + lat5 = ptr_alb->lat + nlat5 = dimsizes(lat5) + lon5 = ptr_alb->lon + nlon5 = dimsizes(lon5) + + landmask5 = new((/nvars,nlat5,nlon5/),"double") + area5 = new((/nvars,nlat5,nlon5/),"double") + landmask5!0 = "var" + landmask5&var = vv + landmask5!1 = "lat" + landmask5!2 = "lon" + landmask5&lat = lat5 + landmask5&lon = lon5 + area5!0 = "var" + area5&var = vv + area5!1 = "lat" + area5&lat = lat5 + area5!2 = "lon" + area5&lon = lon5 + array5 = new((/nvars, months, nlat5, nlon5/),"double") ; 1 degree + end if + + sFlag = new((/nvars/),"integer") + aFlag = new((/nvars/),"integer") + sFlag = 0 + aFlag = 0 + +;************************** +; Variable loop ----------- read both cases of each variable into memeory. 
+;************************** + array1 = new((/nvars, months, nlat1, nlon1/),"float") ; T42 + array2 = new((/nvars, months, nlat2, nlon2/),"float") ; T42 + array3 = new((/nvars, months, nlat3, nlon3/),"double") ; 05 degree + array4 = new((/nvars, months, nlat4, nlon4/),"double") ; 1 degree + array6 = new((/nvars, months, nlat6, nlon6/),"float") ; 0.9x1.25 degree + array7 = new((/nvars, months, nlat7, nlon7/),"float") ; 0.5 degree + + units = new(nvars,"string") + long_name = new(nvars,"string") + +; read all variables into memory first and stuff them into two arrays (2 cases) + pcheck1 = 0 + pcheck2 = 0 + snowFlag = 0 + do i=0,nvars-1 + + print("vars = " + vars(i) ) + if (paleo .ne. "True") then + if (vars(i) .eq. "SNOWDP") then + snowFlag = snowFlag + 1 + end if + end if + +; check for variables in case 1 + vcheck1(i) = True + vcheck2(i) = True + info = var_init(vars(i)) + if(isfilevar(in1,vars(i)))then + x1 = in1->$vars(i)$ + plot_check = True + else + if (info@derivedVariable .eq. True) then + x1 = derive_land( vars(i),in1) + else + print("variable "+ vars(i)+ " is not a defined variable in case1.") + pcheck1 = pcheck1 + 1 + vcheck1(i) = False + end if + end if +; check for variables in case 2 + if(isfilevar(in2,vars(i)))then + x2 = in2->$vars(i)$ + plot_check = True + else + if (info@derivedVariable .eq. True) then + x2 = derive_land( vars(i),in2) + else + print("variable "+ vars(i)+ " is not a defined variable in case2.") + pcheck2 = pcheck2 + 1 + vcheck2(i) = False + end if + end if + +; if neither case has the variables, don't plot them. + if (pcheck1 .eq. nvars .and. pcheck2 .eq. nvars) then + plot_check = False + end if + +;read in observations (if present) + obsFlag = 0 ; (1 = on; 0 = off) + if (paleo .ne. "True") then + if (vars(i) .eq. "TSA" ) then + x3 = ptr_wm->$vars(i)$ + l3 = ptr_wm->datamask + a3 = ptr_wm->weight + cases(2) = "Willmott-Matsuura(T,P),GRDC(R),USAF/ETAC(S),FLUXNET(LHF,GPP)" + obsFlag = 1 + sFlag(i) = 0 + else + if (vars(i) .eq. 
"PREC" .and. p .ne. 5) then + x3 = ptr_wm->PREC + l3 = ptr_wm->datamask + a3 = ptr_wm->weight + cases(2) = "Willmott-Matsuura(T,P),GRDC(R),USAF/ETAC(S),FLUXNET(LHF,GPP)" + obsFlag = 1 + sFlag(i) = 0 + else + if (vars(i) .eq. "TOTRUNOFF" ) then + x3 = ptr_grdc->RUNOFF + l3 = ptr_grdc->datamask + a3 = ptr_grdc->weight + obsFlag = 1 + sFlag(i) = 0 + else + if (vars(i) .eq. "SNOWDP" .and. snowFlag .eq. 1) then + if (isvar("x3")) then + delete(x3) + end if + x3 = ptr_fd->$vars(i)$ + l3 = ptr_fd->datamask + a3 = ptr_fd->weight + cases(2) = "USAF" + x3@_FillValue = 1e30 + obsFlag = 1 + sFlag(i) = 0 + else + if (vars(i) .eq. "FSNO" ) then + x4 = ptr_scf->SCF + x4 = x4 * 0.01 ; convert from percent to 0-1 + l4 = ptr_scf->landmask + lf = ptr_scf->landfrac + a4 = ptr_scf->weight + cases(2) = "NOAA AVHRR (1967-2003)" + x4@_FillValue = 1e30 + obsFlag = 1 + sFlag(i) = 1 + else + if (vars(i) .eq. "H2OSNO" ) then + x4 = ptr_swe->SWE + l4 = ptr_swe->landmask + lf = ptr_swe->landfrac + a4 = ptr_swe->weight + cases(3) = "CMC (1980-1996)" + x4@_FillValue = 1e30 + obsFlag = 1 + sFlag(i) = 1 + else + if (vars(i) .eq. "SNOWDP" .and. snowFlag .eq. 2) then + if (isvar("x4")) then + delete(x4) + end if + x4 = ptr_sd->SNOWD + l4 = ptr_sd->landmask + lf = ptr_sd->landfrac + a4 = ptr_sd->weight + cases(3) = "CMC (1980-1996)" + x4@_FillValue = 1e30 + obsFlag = 1 + sFlag(i) = 1 + else + if (vars(i) .eq. "VBSA") then + if (isvar("x5")) then + delete(x5) + end if + x5 = ptr_alb->VBSA + cases(3) = "MODIS (2001-2003)" + l5 = ptr_alb->LANDFRAC + a5 = ptr_alb->weight + x5@_FillValue = 1e30 + obsFlag = 1; + sFlag(i) = 0; + aFlag(i) = 1; + else + if (vars(i) .eq. "NBSA") then + if (isvar("x5")) then + delete(x5) + end if + x5 = ptr_alb->NBSA + cases(3) = "MODIS (2001-2003)" + l5 = ptr_alb->LANDFRAC + a5 = ptr_alb->weight + x5@_FillValue = 1e30 + obsFlag = 1; + sFlag(i) = 0; + aFlag(i) = 1; + else + if (vars(i) .eq. 
"VWSA") then + if (isvar("x5")) then + delete(x5) + end if + x5 = ptr_alb->VWSA + cases(3) = "MODIS (2001-2003)" + l5 = ptr_alb->LANDFRAC + a5 = ptr_alb->weight + x5@_FillValue = 1e30 + obsFlag = 1; + sFlag(i) = 0; + aFlag(i) = 1; + else + if (vars(i) .eq. "NWSA") then + if (isvar("x5")) then + delete(x5) + end if + x5 = ptr_alb->NWSA + cases(3) = "MODIS (2001-2003)" + l5 = ptr_alb->LANDFRAC + a5 = ptr_alb->weight + x5@_FillValue = 1e30 + obsFlag = 1; + sFlag(i) = 0; + aFlag(i) = 1; + else + if (vars(i) .eq. "ASA") then ; read ASA from different modis file (ptr_asa) + if (isvar("x5")) then + delete(x5) + end if + x5 = ptr_asa->BRDALB ; broadband albedo + cases(3) = "MODIS (2001-2003)" + l5 = ptr_alb->LANDFRAC ; use LANDFRAC and weight from ptr_alb file. + a5 = ptr_alb->weight + x5@_FillValue = 1e30 + obsFlag = 1; + sFlag(i) = 0; + aFlag(i) = 1; + else + if (vars(i) .eq. "LHEAT") then + x6 = ptr_lhf->LHF + l6 = ptr_lhf->datamask + a6 = ptr_lhf->area + obsFlag = 1 + sFlag(i) = 0 + else + if ((vars(i) .eq. "FPSN" .and. cn .eq. 0) .or. vars(i) .eq. "GPP") then + x6 = ptr_gpp->GPP + l6 = ptr_gpp->datamask + a6 = ptr_gpp->area + obsFlag = 1 + sFlag(i) = 0 + else + if (vars(i) .eq. "TLAI") then + x7 = ptr_lai->TLAI + l7 = ptr_lai->landmask + a7 = ptr_lai->area + obsFlag = 1 + sFlag(i) = 0 + else + obsFlag = 0 + sFlag(i) = 0 + aFlag(i) = 0 + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if ; paleo + + if (paleo .eq. "True") then + obsFlag = 0 + end if + + if (vcheck1(i) .eq. True) then + ; catch NaNs + x1 = catchNaN(x1) + x1 = scale_var(x1,scale_type(i),info) + + if (obsFlag .eq. 
1) then ; process variables with observed data + if (isvar("x3") ) + x3 = catchNaN(x3) + x3!0 = "mons" + x3&mons = mm + x3!1 = "lat" + x3&lat = lat3 + x3!2 = "lon" + x3&lon = lon3 + a3!0 = "lat" + a3!1 = "lon" + l3!0 = "lat" + l3!1 = "lon" + a3&lat = lat3 + a3&lon = lon3 + l3&lat = lat3 + l3&lon = lon3 + if (min(x3&lon) .ge. 0) then + x3 = lonFlip(x3) + end if + if (min(a3&lon) .ge. 0) then + a3 = lonFlip(a3) + end if + if (min(l3&lon) .ge. 0) then + l3 = lonFlip(l3) + end if + array3(i,:,:,:) = x3 ; Wrapper fcn that copies coord Vars + landmask3(i,:,:) = l3 + area3(i,:,:) = a3 + delete(l3) + delete(a3) + delete(x3) + end if + + if (isvar("x4") .and. sFlag(i) .eq. 1) then + x4 = catchNaN(x4) + x4!0 = "mons" + x4&mons = mm + x4!1 = "lat" + x4&lat = lat4 + x4!2 = "lon" + x4&lon = lon4 + a4!0 = "lat" + a4!1 = "lon" + l4!0 = "lat" + l4!1 = "lon" + lf!0 = "lat" + lf!1 = "lon" + a4&lat = lat4 + a4&lon = lon4 + l4&lat = lat4 + l4&lon = lon4 + lf&lat = lat4 + lf&lon = lon4 + if (min(x4&lon) .ge. 0) then + x4 = lonFlip(x4) + end if + if (min(a4&lon) .ge. 0) then + a4 = lonFlip(a4) + end if + if (min(l4&lon) .ge. 0) then + l4 = lonFlip(l4) + end if + if (min(lf&lon) .ge. 0) then + lf = lonFlip(lf) + end if + + ; mask out fractional grid cells. + l41d = ndtooned(l4) + lf1d = ndtooned(lf) + l41d = mask(l41d, lf1d .ne. 1, False) + l4 = onedtond(l41d,(/nlat4,nlon4/)) + + array4(i,:,:,:) = x4 ; Wrapper fcn that copies coord Vars + landmask4(i,:,:) = l4 + area4(i,:,:) = a4 + delete(l4) + delete(a4) + delete(x4) + delete(lf) + end if + + if (isvar("x5") .and. aFlag(i) .eq. 1) then + x5 = catchNaN(x5) + x5!0 = "mons" + x5&mons = mm + x5!1 = "lat" + x5&lat = lat5 + x5!2 = "lon" + x5&lon = lon5 + a5!0 = "lat" + a5!1 = "lon" + l5!0 = "lat" + l5!1 = "lon" + a5&lat = lat5 + a5&lon = lon5 + l5&lat = lat5 + l5&lon = lon5 + + if (min(x5&lon) .ge. 0) then + x5 = lonFlip(x5) + end if + if (min(a5&lon) .ge. 0) then + a5 = lonFlip(a5) + end if + if (min(l5&lon) .ge. 
0) then + l5 = lonFlip(l5) + end if + + array5(i,:,:,:) = x5 ; Wrapper fcn that copies coord Vars + landmask5(i,:,:) = l5 + area5(i,:,:) = a5 + delete(l5) + delete(a5) + delete(x5) + end if + + if (isvar("x6")) then + x6 = catchNaN(x6) + x6!0 = "mons" + x6&mons = mm + x6!1 = "lat" + x6&lat = lat6 + x6!2 = "lon" + x6&lon = lon6 + a6!0 = "lat" + a6!1 = "lon" + l6!0 = "lat" + l6!1 = "lon" + a6&lat = lat6 + a6&lon = lon6 + l6&lat = lat6 + l6&lon = lon6 + + if (min(x6&lon) .ge. 0) then + x6 = lonFlip(x6) + end if + if (min(a6&lon) .ge. 0) then + a6 = lonFlip(a6) + end if + if (min(l6&lon) .ge. 0) then + l6 = lonFlip(l6) + end if + + array6(i,:,:,:) = x6 ; Wrapper fcn that copies coord Vars + landmask6(i,:,:) = l6 + area6(i,:,:) = a6 + delete(l6) + delete(a6) + delete(x6) + end if + + if (isvar("x7")) then + x7 = catchNaN(x7) + x7!0 = "mons" + x7&mons = mm + x7!1 = "lat" + x7&lat = lat7 + x7!2 = "lon" + x7&lon = lon7 + a7!0 = "lat" + a7!1 = "lon" + l7!0 = "lat" + l7!1 = "lon" + a7&lat = lat7 + a7&lon = lon7 + l7&lat = lat7 + l7&lon = lon7 + + if (min(x7&lon) .ge. 0) then + x7 = lonFlip(x7) + end if + if (min(a7&lon) .ge. 0) then + a7 = lonFlip(a7) + end if + if (min(l7&lon) .ge. 0) then + l7 = lonFlip(l7) + end if + + array7(i,:,:,:) = x7 ; Wrapper fcn that copies coord Vars + landmask7(i,:,:) = l7 + area7(i,:,:) = a7 + delete(l7) + delete(a7) + delete(x7) + end if + + end if + + if (min(x1&lon) .ge. 0) then + x1 = lonFlip(x1) + end if + array1(i,:,:,:) = x1 + units(i) = x1@units + long_name(i) = x1@long_name + delete(x1) + end if + + if (vcheck2(i) .eq. True) then + x2 = catchNaN(x2) + x2 = scale_var(x2,scale_type(i),info) + if (min(x2&lon) .ge. 0) then + x2 = lonFlip(x2) + end if + array2(i,:,:,:) = x2 + delete(x2) + end if + if (paleo .ne. "True") then + if (snowFlag .eq. 
1) then ; run SNOWDP twice to see both obs datasets + i = i-1 + end if + end if + end do ; end variable loop + +;********************************** +; now plot vars by region +;********************************** +if(plot_check.eq.True)then + + if (paleo .eq. "True") then + startRegion = nreg-3 ; N. + S. hemisphere land + global + else + startRegion = 0 + end if + + do region=startRegion,nreg-1 + +; open ps file for plotting + wks = gsn_open_wks(plot_type,wkdir + "set3_" + plot_name + "_" + ps_id(region)) + + pres = True ; panel only resources +; create common legend + pres@txString = ptitstr(region)+boxstr(region) ; common title + +; each regional plot contains all variables, so loop over vars +; moistEnergyFlx plot draws three variabls on one plot. Other plots have only 1 variable per plot. + if (p .eq. 5) then + data1 = new((/nvars,nmons/),"double") + data2 = new((/nvars,nmons/),"double") + labels = new(3,"string") + do i = 0,nvars-1 + + ; process model results + compare = "model" + if (vcheck1(i) .eq. True) then + x1 = array1(i,:,:,:) + if (vars(i) .eq. "RNET") then + x1 = (x1 / 2.501e6 ) * secondsPerDay + end if + var_plot1 = regional_values(x1,region,area1,fland1,scale_type(i),\ + regions_file,time1, nmons,compare) + data1(i,:) = var_plot1 + delete(x1) + end if + if (vcheck2(i) .eq. True) then + x2 = array2(i,:,:,:) + if (vars(i) .eq. "RNET") then + x2 = (x2 / 2.501e6 ) * secondsPerDay + end if + var_plot2 = regional_values(x2,region,area2,fland2,scale_type(i),\ + regions_file,time2, nmons,compare) + data2(i,:) = var_plot2 + delete(x2) + end if + + res@tiYAxisString = "mm/d" + labels(i) = titstr(i) + + if (vcheck1(i) .eq. True) then + delete(var_plot1) + end if + if (vcheck2(i) .eq. 
True) then + delete(var_plot2) + end if + end do + res@gsnLeftString = cases(0) + plot(0) = gsn_csm_xy(wks,month,data1,res) + res@gsnLeftString = cases(1) + plot(1) = gsn_csm_xy(wks,month,data2,res) + delete(data1) + delete(data2) + delete(res@gsnLeftString) + else + do i = 0,nvars-1 + compare = "model" + +; snowdp plot draws 2 obs + 1 variable on snowdp plot. Other plots have only 1 variable per plot. + if (vars(i) .eq. "SNOWDP") then + if (paleo .eq. "True") then ; no obs + data = new((/2,nmons/),"double") + else + data = new((/4,nmons/),"double") + end if + res@xyDashPatterns = (/0.,0.,16.,16./) ; solid, dashed + res@xyLineColors = (/"red","blue","blue","black"/) + + ; process model 1 results + compare = "model" + x1 = array1(i,:,:,:) + var_plot1 = regional_values(x1,region,area1,fland1,scale_type(i),\ + regions_file,time1,nmons,compare) + data(0,:) = var_plot1 + delete(var_plot1) + delete(x1) + + ; process model 2 results + compare = "model" + x2 = array2(i,:,:,:) + var_plot2 = regional_values(x2,region,area2,fland2,scale_type(i),\ + regions_file,time1,nmons,compare) + data(1,:) = var_plot2 + delete(var_plot2) + delete(x2) + + if (paleo .eq. "False") then + ; process 1st set of obs (FOSTERDAVY/USAF - dashed line) + compare = "obs" + x3 = array3(i,:,:,:) + var_plot3 = regional_values(x3,region,area3(i,:,:),landmask3(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + data(2,:) = var_plot3 + delete(var_plot3) + delete(x3) + + ; process 2nd set of obs (CMC) - (dashed line) + x4 = array4(i,:,:,:) + var_plot4 = regional_values(x4,region,area4(i,:,:),landmask4(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + data(3,:) = var_plot4 + delete(var_plot4) + delete(x4) + end if ; paleo + + res@tiYAxisString = "mm/d" + res@gsnCenterString = titstr(i) + + plot(i) = gsn_csm_xy(wks,month,data,res) + delete(data) + else + if (vars(i) .eq. 
"FSNO") then + delete(res@xyLineColors) + res@xyDashPatterns = (/0.,0.,16.,16./) ; solid, dashed + res@xyLineThicknesses = (/2.,2.,2.,2./) + res@xyLineColors = (/"red","blue","black","black"/) + end if + if (vars(i) .eq. "H2OSNO") then + delete(res@xyDashPatterns) + delete(res@xyLineColors) + res@xyLineThicknesses = (/2.,2.,2.,2./) + res@xyDashPatterns = (/0.,0.,16.,16./) ; solid, dashed + res@xyLineColors = (/"red","blue","black","black"/) + end if + if (aFlag(i) .eq. 1) then + delete(res@xyDashPatterns) + delete(res@xyLineColors) + delete(res@xyLineThicknesses) + res@xyLineThicknesses = (/2.,2.,2./) + res@xyDashPatterns = (/0.,0.,16./) ; solid, dashed + res@xyLineColors = (/"red","blue","black"/) + end if + + ; process model + if (vcheck1(i) .eq. True) + x1 = array1(i,:,:,:) + compare = "model" + if (vars(i) .eq. "ALBEDO" .or. aFlag(i) .eq. 1) then + var_plot1 = regional_albedo(x1,region,area1,fland1,scale_type(i),\ + regions_file,nmons,compare) + else + var_plot1 = regional_values(x1,region,area1,fland1,scale_type(i),\ + regions_file,time1,nmons,compare) + end if + delete(x1) + end if + if (vcheck2(i) .eq. True) + x2 = array2(i,:,:,:) + if (vars(i) .eq. "ALBEDO" .or. aFlag(i) .eq. 1) then + var_plot2 = regional_albedo(x2,region,area2,fland2,scale_type(i),\ + regions_file,nmons,compare) + else + var_plot2 = regional_values(x2,region,area2,fland2,scale_type(i),\ + regions_file,time2,nmons,compare) + end if + delete(x2) + end if + + + ; process variables with observed data + if (obsFlag .eq. 1) then + data = new((/3,nmons/),"double") + compare = "obs" + if (sFlag(i) .eq. 1) then ; processing snow vars at 1x1 degree + x4 = array4(i,:,:,:) + var_plot3 = regional_values(x4,region,area4(i,:,:),landmask4(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x4) + else + if ( aFlag(i) .eq. 
1) then ; processing albedo vars at 1x1 degree + x5 = array5(i,:,:,:) + var_plot3 = regional_values(x5,region,area5(i,:,:),landmask5(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x5) + else + x3 = array3(i,:,:,:) + var_plot3 = regional_values(x3,region,area3(i,:,:),landmask3(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x3) + if (vars(i) .eq. "LHEAT") then + x6 = array6(i,:,:,:) + var_plot3 = regional_values(x6,region,area6(i,:,:),landmask6(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x6) + else + if ((vars(i) .eq. "FPSN" .and. cn .eq. 0) .or. vars(i) .eq. "GPP") then + x6 = array6(i,:,:,:) + var_plot3 = regional_values(x6,region,area6(i,:,:),landmask6(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x6) + end if + if (vars(i) .eq. "TLAI") then + x7 = array7(i,:,:,:) + var_plot3 = regional_values(x7,region,area7(i,:,:),landmask7(i,:,:),scale_type(i),\ + regions_file,time1,nmons,compare) + delete(x7) + end if + end if + end if + end if + data(2,:) = var_plot3 + delete(var_plot3) + else + data = new((/2,nmons/),"double") + end if + + if (isvar("var_plot1")) then + data(0,:) = var_plot1 + delete(var_plot1) + end if + if (isvar("var_plot2")) then + data(1,:) = var_plot2 + delete(var_plot2) + end if + + res@tiYAxisString = units(i) + res@gsnCenterString = titstr(i) + + plot(i) = gsn_csm_xy(wks,month,data,res) + delete(data) + delete(res@gsnCenterString) + end if ; end SNOWDP loop + end do ; end variable loop + end if ; end plot 5 fork + +; and plot it already using gsn_panel to print all plots onto it from plot array + + +;************************************************* +; gsnpanel takes this plot array and massages it and puts it onto page for us. 
+;************************************************* + pres@gsnFrame = False ; don't advance yet so add legend + pres@gsnMaximize = True ; make as large as possible + pres@gsnPaperOrientation = "Portrait" + pres@gsnPanelRowSpec = True + pres@gsnPanelBottom = 0.15 ; save space for legend + + if (isdefined("lgres")) + delete(lgres) + end if + lgres = True ; allow legend resources + lgres@lgLineColors = res@xyLineColors ; set these equal to those + lgres@lgPerimOn = False + lgres@lgDashIndexes = res@xyDashPatterns ; ditto + lgres@lgLabelFontHeightF = 0.015 ; increase font height + lgres@lgLineLabelStrings = (/"","",""/) ; no labels in lines + lgres@lgAutoManage = False ; must have to adjust fonts + lgres@vpWidthF = 0.45 + lgres@vpHeightF = 0.15 + if (p .eq. 0) then ; landf (7 vars) + cases(2) = "Willmott-Matsuura(T,P),GRDC(R),USAF/ETAC(S),FLUXNET(LHF,GPP),MODIS(LAI)" + cases(3) = "CMC 1980-1996" + pres@gsnPanelCenter = False + pres@gsnPanelYWhiteSpacePercent = 4 + pres@gsnPanelXWhiteSpacePercent = 4 + gsn_panel(wks,plot,(/2,2,2,2/),pres) ; draw panel plot + lgres@vpWidthF = 0.80 + lgres@lgLineLabelsOn = False + lgres@lgLineColors = res@xyLineColors ; set these equal to those + lgres@lgDashIndexes = res@xyDashPatterns ; ditto + gsn_legend_ndc(wks,4,cases(0:3),0.15,0.15,lgres) ; draw common legend MAY HAVE TO ADJUST Y + frame(wks) + end if + if (p .eq. 
1) then ; radf (7 vars) + pres@gsnPanelCenter = False + gsn_panel(wks,plot,(/2,2,2,1/),pres) + ; gsn_legend_ndc(wks,2,cases,0.35,0.10,lgres) ; draw common legend MAY HAVE TO ADJUST Y + legend = create "Legend" legendClass wks + "vpXF" : 0.51 + "vpYF" : 0.33 + "vpWidthF" : 0.25 + "vpHeightF" : 0.125 + "lgPerimOn" : False + "lgItemCount" : 2 + "lgLabelsOn" : True + "lgLineLabelsOn" : False + "lgLabelStrings" : cases(0:1) + "lgDashIndexes" : (/0,0/) + "lgLineColors" : (/"red","blue"/) + "lgMonoLineLabelFontColor" : True + "lgAutoManage" : False + "lgLabelFontHeightF" : 0.0125 + "lgLabelPosition" : "Center" + "lgLabelAlignment" : "AboveItems" + end create + draw(legend) + frame(wks) + delete(legend) + end if + if (p .eq. 2) then ; turbf (9) + pres@gsnPanelCenter = False + gsn_panel(wks,plot,(/2,2,2,2,2/),pres) + legend = create "Legend" legendClass wks + "vpXF" : 0.30 + "vpYF" : 0.16 + "vpWidthF" : 0.20 + "vpHeightF" : 0.1 + "lgPerimOn" : False + "lgItemCount" : 2 + "lgLabelsOn" : True + "lgLineLabelsOn" : False + "lgLabelStrings" : cases(0:1) + "lgDashIndexes" : (/0,0/) + "lgLineColors" : (/"red","blue"/) + "lgMonoLineLabelFontColor" : True + "lgAutoManage" : False + "lgLabelFontHeightF" : 0.015 + "lgLabelPosition" : "Center" + "lgLabelAlignment" : "AboveItems" + end create + draw(legend) + frame(wks) + delete(legend) + end if + if (p .eq. 3 ) then ; cn (10 vars each) + pres@gsnPanelCenter = False + if (clamp .eq. 1 .or. casa .eq. 
1) then + gsn_panel(wks,plot,(/2,2,2/),pres) ; draw panel plot + gsn_legend_ndc(wks,2,cases(0:1),0.38,0.13,lgres) ; draw common legend MAY HAVE TO ADJUST Y + frame(wks) + else + gsn_panel(wks,plot,(/2,2,2,2,2/),pres) ; draw panel plot + ;gsn_legend_ndc(wks,2,cases(0:1),0.35,0.13,lgres) ; draw common legend MAY HAVE TO ADJUST Y + legend = create "Legend" legendClass wks + "vpXF" : 0.30 + "vpYF" : 0.16 + "vpWidthF" : 0.20 + "vpHeightF" : 0.1 + "lgPerimOn" : False + "lgItemCount" : 2 + "lgLabelsOn" : True + "lgLineLabelsOn" : False + "lgLabelStrings" : cases(0:1) + "lgDashIndexes" : (/0,0/) + "lgLineColors" : (/"red","blue"/) + "lgMonoLineLabelFontColor" : True + "lgAutoManage" : False + "lgLabelFontHeightF" : 0.015 + "lgLabelPosition" : "Center" + "lgLabelAlignment" : "AboveItems" + end create + draw(legend) + frame(wks) + delete(legend) + end if + end if + if (p .eq. 4) then ; cn and fire (6 vars each) + pres@gsnPanelCenter = False + gsn_panel(wks,plot,(/2,2,2/),pres) ; draw panel plot + gsn_legend_ndc(wks,2,cases(0:1),0.35,0.17,lgres) ; draw common legend MAY HAVE TO ADJUST Y + frame(wks) + end if + if (p .eq. 5) then + pres@gsnPanelCenter = True + gsn_panel(wks,plot,(/1,2/),pres) ; draw panel plot + lgres@lgLabelFontHeightF = 0.015 ; increase font height + gsn_legend_ndc(wks,3,labels,0.275,0.22,lgres) ; draw common legend MAY HAVE TO ADJUST Y + frame(wks) + delete(labels) + end if + if (p .eq. 6) then + pres@gsnPanelCenter = True + gsn_panel(wks,plot,(/2,2,1/),pres) ; draw panel plot + lgres@lgLabelFontHeightF = 0.028 + gsn_legend_ndc(wks,2,cases(0:1),0.275,0.17,lgres) ; draw common legend MAY HAVE TO ADJUST Y + frame(wks) + lgres@lgLabelFontHeightF = 0.015 + end if + if (p .eq. 
7) then ; snow + cases(2) = "USAF" + cases(3) = "CMC/NOAA-AVHRR" + pres@gsnPanelCenter = True + pres@gsnPanelYWhiteSpacePercent = 4 + pres@gsnPanelXWhiteSpacePercent = 4 + gsn_panel(wks,plot,(/2,2/),pres) ; draw panel plot + + delete(lgres) + lgres = True ; allow legend resources + lgres@lgPerimOn = False + lgres@lgLabelFontHeightF = 0.015 ; increase font height + lgres@lgLineLabelStrings = (/"","",""/) ; no labels in lines + lgres@lgAutoManage = False ; must have to adjust fonts + lgres@vpWidthF = 0.55 + lgres@vpHeightF = 0.15 + lgres@lgLineColors = (/"red","blue","blue","black"/) + lgres@lgLineThicknesses = (/2.,2.,2,1.5/) + lgres@lgDashIndexes = (/0.,0.,16.,16./) ; solid, dashed + lgres@lgLineLabelsOn = False + gsn_legend_ndc(wks,4,cases(0:3),0.225,0.23,lgres) ; draw common legend MAY HAVE TO ADJUST Y + gsn_panel(wks,plot,(/2,2/),pres) ; draw panel plot + frame(wks) + end if + if (p .eq. 8) then ; snow + cases(2) = "MODIS 2001-2003" + delete(lgres) + lgres = True ; allow legend resources + lgres@lgPerimOn = False + lgres@lgLabelFontHeightF = 0.015 ; increase font height + lgres@lgLineLabelStrings = (/"","",""/) ; no labels in lines + lgres@lgAutoManage = False ; must have to adjust fonts + lgres@vpWidthF = 0.55 + lgres@vpHeightF = 0.15 + lgres@lgLineColors = res@xyLineColors ; set these equal to those + lgres@lgDashIndexes = res@xyDashPatterns ; ditto + lgres@lgLineLabelsOn = False + gsn_legend_ndc(wks,3,cases(0:2),0.225,0.15,lgres) ; draw common legend MAY HAVE TO ADJUST Y + gsn_panel(wks,plot,(/2,2,1/),pres) ; draw panel plot + frame(wks) + end if + +;; frame(wks) + + end do ; end of region loop +end if ; end of plot_check loop + + + delete(vars) + delete(vcheck1) + delete(vcheck2) + delete(array1) + delete(array2) + delete(array3) + delete(array4) + if (isvar("array5")) then + delete(array5) + end if + if (isvar("array6")) then + delete(array6) + end if + if (isvar("array7")) then + delete(array7) + end if + delete(scale_type) + delete(plot) + delete(units) + 
delete(long_name) + delete(vv) + delete(cases) + delete(pres@gsnPanelRowSpec) + delete(titstr) + delete(res@xyLineColors) + delete(res@xyDashPatterns) + delete(sFlag) + delete(aFlag) + + end do ; end p plot loop + + delete(landmask3) + delete(landmask4) + delete(landmask5) + delete(landmask6) + delete(landmask7) + delete(area2) + delete(area3) + delete(area4) + delete(area5) + delete(area6) + delete(area7) + +;---------------------------------------------- +; Create global maps delineating regions (on model grid) +;---------------------------------------------- + +;---------------------------------------------- +; common plot resources +;---------------------------------------------- + delete(res) + res = True + res@mpProjection = "Robinson" + res@mpPerimOn = False + res@mpGridLineColor = "transparent" + res@cnFillOn = True + res@cnLineLabelsOn = False + res@lbLabelBarOn = False + res@cnLinesOn = False + res@mpGridLonSpacingF = 180.0 ; Set lon grid spacing + res@mpGridLatSpacingF = 90.0 ; Set lat grid spacing + res@mpFillOn = False + res@mpOutlineOn = True + res@mpGridAndLimbOn = True + res@mpGridLineDashPattern = 2 ; Set grid line dash pattern (dash) + res@mpGridLineThicknessF = 0.5 ; Set grid line thickness + sstr=(/" "/) + res@gsnMaximize = True + res@cnLevelSelectionMode = "ExplicitLevels" + wks = gsn_open_wks(plot_type,wkdir+"set3_reg_all") + cmap = (/"(/1.00, 1.00, 1.00/)", "(/0.00, 0.00, 0.00/)", \ + "(/1.00, 1.00, 1.00/)", "Blue" /) + gsn_define_colormap(wks, cmap ) + res@gsnLeftString = " " + res@gsnRightString = " " + res@cnLevels = (/0./) + + flag1 = area1 + flag1 = -0.1 + + if (paleo .eq. "True") then + startRegion = nreg-3 ; N. + S. hemisphere land + global + else + startRegion = 0 + end if + + if (paleo .eq. "True") then + landfrac = in1->landfrac + oro = new((/nlat1,nlon1/),integer) + oro = 1 + oro@_FillValue = 0 + oro = where(landfrac .gt. 
0.0, 1,0) +; fname = wkdir+prefix(0) +; fname = wkdir+"basename" + fname = wkdir+getenv(name1) + paleo_outline(oro,lat1,lon1,1, fname) + res@mpDataBaseVersion = "Ncarg4_1" + res@mpDataSetName = fname + do ar = startRegion, nreg-1 ; don't include northern and southern hemisphere and global + flag1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) = 0.1*fland1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) - 1.e-36 + end do + else + do ar = 0, nreg-4 ; don't include northern and southern hemisphere and global + flag1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) = 0.1*fland1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) - 1.e-36 + end do + end if + + res@gsnCenterString = "Model Data Regions - All" + plot = gsn_csm_contour_map(wks, flag1, res) + + do ar = startRegion, nreg-1 +;---------------------------------------------- +; Open file for plots +;---------------------------------------------- + wks = gsn_open_wks(plot_type,wkdir+"set3_reg_"+ps_id(ar)) + cmap = (/"(/1.00, 1.00, 1.00/)", "(/0.00, 0.00, 0.00/)", \ + "(/1.00, 1.00, 1.00/)", "Blue" /) + gsn_define_colormap(wks, cmap ) + flag1 = -0.1 + flag1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) = 0.1*fland1 ({box_s(ar):box_n(ar)},{box_w(ar):box_e(ar)}) - 1.e-36 + res@gsnCenterString = "Model Data Regions - " + ptitstr(ar) + boxstr(ar) + plot = gsn_csm_contour_map(wks, flag1, res) + end do + + print ("=========================================") + print ("Finish Time: "+systemfunc("date") ) + print ("=========================================") + + ; remove error file if program completes successfully. 
+ filo = wkdir +"/set3_error_file" + system("/bin/rm -f " + filo) + +end diff --git a/lnd_diag/shared/create_var_lists.csh b/lnd_diag/shared/create_var_lists.csh index 73955a7d..f283a38f 100755 --- a/lnd_diag/shared/create_var_lists.csh +++ b/lnd_diag/shared/create_var_lists.csh @@ -9,6 +9,10 @@ cat $INPUT_FILES/set2_clm.txt > ${WKDIR} cat $INPUT_FILES/set3_*.txt > ${WKDIR}master_set3.txt cat $INPUT_FILES/set5_clm.txt $INPUT_FILES/set5_hydReg.txt > ${WKDIR}master_set5.txt cat $INPUT_FILES/set6_*.txt > ${WKDIR}master_set6.txt +cat $INPUT_FILES/set10_*.txt > ${WKDIR}master_set10.txt +cat $INPUT_FILES/set11_*.txt > ${WKDIR}master_set11.txt +cat $INPUT_FILES/set12_*.txt > ${WKDIR}master_set12.txt + # If CN is on, add all regular cn variables if ($CN == 1) then diff --git a/lnd_diag/shared/lnd_create_webpage.pl b/lnd_diag/shared/lnd_create_webpage.pl index 15b4c37c..81e06959 100755 --- a/lnd_diag/shared/lnd_create_webpage.pl +++ b/lnd_diag/shared/lnd_create_webpage.pl @@ -21,6 +21,9 @@ $set_8 = $ENV{'set_8'}; $set_8_lnd = $ENV{'set_8_lnd'}; $set_9 = $ENV{'set_9'}; + $set_10 = $ENV{'set_10'}; + $set_11 = $ENV{'set_11'}; + $set_12 = $ENV{'set_12'}; ## $prefix_1 = $ENV{'prefix_1'}; $prefix_1 = $ENV{'CASE'}; @@ -89,8 +92,8 @@ ##if($runtype eq "model-obs") { $set_9=0; } if($runtype eq "model-obs") { $set_9 = 'False'; } -@setList = (1,2,3,4,5,6,7,8,9); -@status = ($set_1, $set_2, $set_3, $set_4, $set_5, $set_6, $set_7,$set_8,$set_9); +@setList = (1,2,3,4,5,6,7,8,9,10,11,12); +@status = ($set_1, $set_2, $set_3, $set_4, $set_5, $set_6, $set_7,$set_8,$set_9,$set_10,$set_11,$set_12); for $set (@setList) { @@ -122,6 +125,9 @@ if ($set == 7) { &set7Page; } if ($set == 8) { &set8Page; } if ($set == 9) { &set9Page; } + if ($set == 10) { &set10Page; } + if ($set == 11) { &set11Page; } + if ($set == 12) { &set12Page; } } } &clickablePlotTypes(@status); @@ -272,12 +278,12 @@ sub setHeader sub clickablePlotTypes { printf(fp_main "\n"); - @setList = (1,2,3,4,6,7,8); + @setList = 
(1,2,3,4,6,7,8,10,11,12); printf(fp_main "
\n"); printf(fp_main "Click on Plot Type

\n"); for $set (@setList) { - if ($set == 3 | $set == 4) { $sn = "s"; } + if ($set == 3 | $set == 4 | $set == 12) { $sn = "s"; } else { $sn = "sn"; } ## if( @status[$set-1] == "True") { if( @status[$set-1] eq "True") { @@ -365,6 +371,21 @@ sub setDescription $l2 = " Statistics include DJF, JJA, and ANN biases, and RMSE, correlation and standard deviation of the annual cycle relative to observations
"; $l = $l1.$l2; } + if ($set == 10) + { + $l = "Horizontal contour plots of DJF, MAM, JJA, SON, and ANN means, zoomed in on the Greenland ice sheet (courtesy: Jan Lenaerts, jtmlenaerts@gmail.com)
"; + } + if ($set == 11) + { + $l = "Horizontal contour plots of DJF, MAM, JJA, SON, and ANN means, zoomed in on the Antarctic ice sheet (courtesy: Jan Lenaerts, jtmlenaerts@gmail.com)
"; + } + if ($set == 12) + { + $l1 = "Line plots of monthly climatology: regional air temperature, precipitation,"; + $l2 = " runoff, snow depth, radiative fluxes, and turbulent fluxes,"; + $l3 = " ice sheet cells only (courtesy: Jan Lenaerts, jtmlenaerts@gmail.com)
"; + $l = $l1.$l2.$l3; + } return($l) } @@ -566,6 +587,7 @@ sub set2Page } } + sub set3and6Page { %polar = ( 'Alaskan_Arctic', 'Alaskan Arctic', # polar @@ -1149,3 +1171,518 @@ sub set9Page } printf fp_wp "\n"; } + + + +sub set10Page +{ + # set-specific header + @time = ("DJF","MAM","JJA","SON","ANN"); + printf fp_wp "
\n"; + $snF = 0; + @vSP = ("TSA","Q2M","RH2M","U10","SNOW","RAIN","PBOT", + "FSH","QSOIL","FGR","FLDS","FIRE","FSDS","FSR","ASA", + "QICE_FRZ","QICE_MELT","QSNOMELT","QICE"); + @vCN = ("TSA","Q2M","RH2M","U10","SNOW","RAIN","PBOT", + "FSH","QSOIL","FGR","FLDS","FIRE","FSDS","FSR","ASA", + "QICE_FRZ","QICE_MELT","QSNOMELT","QICE"); + if ($cn) { @VarArray = @vCN } + else { @VarArray = @vSP } + foreach $varname (@VarArray) + { + if ($varname eq "TSA") { + printf fp_wp "RACMO2.3: near-sfc atm & precip\n"; + printf fp_wp "DJF\n MAM\n JJA\n SON\n ANN\n"; + printf fp_wp "\n"; + $v2=""; + } + if ($varname eq "FSH" ) { + printf fp_wp "
\n"; + printf fp_wp "RACMO2.3: Fluxes\n"; + printf fp_wp "\n"; + $v2=""; + } + if ($varname eq "QICE_FRZ" ) { + printf fp_wp "
\n"; + printf fp_wp "RACMO2.3: SMB and components\n"; + printf fp_wp "\n"; + $v2=""; + } + $longName = &getLongName($varname); + printf fp_wp " $varname    \n"; + printf fp_wp " $longName    \n"; + foreach $t (@time) { + $filename = "set10_".$t."_".$varname.$v2.$sfx; + $file = $webdir."/set10/".$filename; + $href = $filename; + if (!-e $file) { printf fp_wp " ---\n"; } + if ( -e $file) { printf fp_wp " plot\n"; } + } + printf fp_wp "\n"; + } + + printf fp_wp "\n"; + printf fp_wp "\n"; + printf fp_wp " Model Only\n"; + printf fp_wp "DJF\n MAM\n JJA\n SON\n ANN\n"; + printf fp_wp "\n"; + $inFile = $wkdir."/master_set".$set.".txt"; + close(fp_in); + open(fp_in,"<"."$inFile") || die "Can't open set input set10Page filelist ($inFile) \n"; + + $ctr=0; + while() + { + @line = split(/\s+/,$_); + $varname = @line[1]; + + # skip model vs obs plots. + # if ($varname eq "TSA" | $varname eq "Q2M" | $varname eq "RH2M" | $varname eq "U10" | + #$varname eq "FSNO"| $varname eq "H2OSNO" | $varname eq "LHEAT" | $varname eq "GPP" | + # $varname eq "TLAI" ) { next; } + + $longName = &getLongName($varname); + # $suffix = substr($filename,(length($filename)-5),1); + if ($varname eq "TLAKE" | $varname eq "SOILLIQ" | $varname eq "SOILICE" | $varname eq "H2OSOI" | $varname eq "TSOI") { + # foreach $layer (0 .. 
9) { + foreach $layer (0, 4, 9) { + $lyr = $layer+1; + printf fp_wp " $varname ($lyr)    \n"; + printf fp_wp " $longName    \n"; + foreach $t (@time) { + $filename = "set10_".$t."_".$varname."_".$layer.$sfx; + $file = $webdir."/set10/".$filename; + $href = $filename; + if (!-e $file) { printf fp_wp " ---\n"; } + if ( -e $file) { printf fp_wp " plot\n"; } + } + printf fp_wp "\n"; + } + } + else { + printf fp_wp " $varname    \n"; + printf fp_wp " $longName    \n"; + foreach $t (@time) { + $filename = "set10_".$t."_".$varname.$sfx; + $file = $webdir."/set10/".$filename; + $href = $filename; + if (!-e $file) { printf fp_wp " ---\n"; } + if ( -e $file) { printf fp_wp " plot\n"; } + } + printf fp_wp "\n"; + } + } +} + + +sub set11Page +{ + # set-specific header + @time = ("DJF","MAM","JJA","SON","ANN"); + printf fp_wp "
\n"; + $snF = 0; + @vSP = ("TSA","Q2M","RH2M","U10","SNOW","RAIN","PBOT", + "FSH","QSOIL","FGR","FLDS","FIRE","FSDS","FSR","ASA", + "QICE_FRZ","QICE_MELT","QSNOMELT","QICE"); + @vCN = ("TSA","Q2M","RH2M","U10","SNOW","RAIN","PBOT", + "FSH","QSOIL","FGR","FLDS","FIRE","FSDS","FSR","ASA", + "QICE_FRZ","QICE_MELT","QSNOMELT","QICE"); + if ($cn) { @VarArray = @vCN } + else { @VarArray = @vSP } + foreach $varname (@VarArray) + { + if ($varname eq "TSA") { + printf fp_wp "RACMO2.3: near-sfc atm & precip\n"; + printf fp_wp "DJF\n MAM\n JJA\n SON\n ANN\n"; + printf fp_wp "\n"; + $v2=""; + } + if ($varname eq "FSH" ) { + printf fp_wp "
\n"; + printf fp_wp "RACMO2.3: Fluxes\n"; + printf fp_wp "\n"; + $v2=""; + } + if ($varname eq "QICE_FRZ" ) { + printf fp_wp "
\n"; + printf fp_wp "RACMO2.3: SMB and components\n"; + printf fp_wp "\n"; + $v2=""; + } + $longName = &getLongName($varname); + printf fp_wp " $varname    \n"; + printf fp_wp " $longName    \n"; + foreach $t (@time) { + $filename = "set11_".$t."_".$varname.$v2.$sfx; + $file = $webdir."/set11/".$filename; + $href = $filename; + if (!-e $file) { printf fp_wp " ---\n"; } + if ( -e $file) { printf fp_wp " plot\n"; } + } + printf fp_wp "\n"; + } + + printf fp_wp "\n"; + printf fp_wp "\n"; + printf fp_wp " Model Only\n"; + printf fp_wp "DJF\n MAM\n JJA\n SON\n ANN\n"; + printf fp_wp "\n"; + $inFile = $wkdir."/master_set".$set.".txt"; + close(fp_in); + open(fp_in,"<"."$inFile") || die "Can't open set input set11Page filelist ($inFile) \n"; + + $ctr=0; + while() + { + @line = split(/\s+/,$_); + $varname = @line[1]; + + # skip model vs obs plots. + # if ($varname eq "TSA" | $varname eq "Q2M" | $varname eq "RH2M" | $varname eq "U10" | + #$varname eq "FSNO"| $varname eq "H2OSNO" | $varname eq "LHEAT" | $varname eq "GPP" | + # $varname eq "TLAI" ) { next; } + + $longName = &getLongName($varname); + # $suffix = substr($filename,(length($filename)-5),1); + if ($varname eq "TLAKE" | $varname eq "SOILLIQ" | $varname eq "SOILICE" | $varname eq "H2OSOI" | $varname eq "TSOI") { + # foreach $layer (0 .. 
9) { + foreach $layer (0, 4, 9) { + $lyr = $layer+1; + printf fp_wp " $varname ($lyr)    \n"; + printf fp_wp " $longName    \n"; + foreach $t (@time) { + $filename = "set11_".$t."_".$varname."_".$layer.$sfx; + $file = $webdir."/set11/".$filename; + $href = $filename; + if (!-e $file) { printf fp_wp " ---\n"; } + if ( -e $file) { printf fp_wp " plot\n"; } + } + printf fp_wp "\n"; + } + } + else { + printf fp_wp " $varname    \n"; + printf fp_wp " $longName    \n"; + foreach $t (@time) { + $filename = "set11_".$t."_".$varname.$sfx; + $file = $webdir."/set11/".$filename; + $href = $filename; + if (!-e $file) { printf fp_wp " ---\n"; } + if ( -e $file) { printf fp_wp " plot\n"; } + } + printf fp_wp "\n"; + } + } +} + + +sub set12Page +{ + %polar = ( 'Alaskan_Arctic', 'Alaskan Arctic', # polar + 'Canadian_Arctic', 'Canadian Arctic', + 'Greenland', 'Greenland', + 'Russian_Arctic', 'Russian Arctic', + 'Polar', 'Polar', + 'Antarctica', 'Antarctica', ); + + %boreal = ( 'Alaska', 'Alaska', # boreal + 'Northwest_Canada', 'Northwest Canada', + 'Central_Canada', 'Central Canada', + 'Eastern_Canada', 'Eastern Canada', + 'Northern_Europe', 'Northern Europe', + 'Western_Siberia', 'Western Siberia', + 'Lost_BorealForest', 'Lost Boreal Forest', + 'Eastern_Siberia', 'Eastern Siberia', ); + + %midLat = ( 'Western_US', 'Western U.S.', # middle latitudes + 'Central_US', 'Central U.S.', + 'Eastern_US', 'Eastern U.S.', + 'Europe', 'Europe', + 'Mediterranean', 'Mediterranean', ); + + %trpRF = ( 'Central_America', 'Central America', # Tropical Rainforests + 'Amazonia', 'Amazonia', + 'Central_Africa', 'Central Africa', + 'Indonesia', 'Indonesia', ); + + %trpSav = ( 'Brazil', 'Brazil', # Tropical Savannas + 'Sahel', 'Sahel', + 'Southern_Africa', 'Southern Africa', + 'India', 'India', + 'Indochina', 'Indochina', ); + + %arid = ( 'Sahara_Desert', 'Sahara Desert', # arid + 'Arabian_Peninsula','Arabian Peninsula', + 'Australia', 'Australia', + 'Central_Asia', 'Central Asia', + 'Mongolia', 
'Mongolia', + 'Tigris_Euphrates', 'Tigris_Euphrates'); + + %highlands = ('Tibetan_Plateau', 'Tibetan Plateau', ); # highland + + %asia = ('Asia', 'Central Asia', # Liya Jin + 'Mongolia_China', 'Central and Eastern Mongolia and NE China', + 'Eastern_China', 'Eastern China', + 'Tibet', 'Tibetan_Plateau', + 'Southern_Asia', 'Southern Asia', + 'NAfrica_Arabia', 'Sahara Desert and Arabian Peninsula', + 'Med_MidEast', 'Mediterranean and Western Asia', ); + + %glHem = ( 'N_H_Land', 'Northern Hemisphere Land', # global and hemispheric + 'S_H_Land', 'Southern Hemisphere Land', + 'Global', 'Global Land', ); + + + # == Set 3 + if ( $cn) { + if ( $hydro) { @set3fluxes = ("landf","radf","turbf","cnFlx","frFlx","moistEnergyFlx","snow","albedo","hydro"); } + else { @set3fluxes = ("landf","radf","turbf","cnFlx","frFlx","moistEnergyFlx","snow","albedo"); } + } + else { + if ( $hydro) { @set3fluxes = ("landf","radf","turbf","moistEnergyFlx","snow","albedo","hydro"); } + else { @set3fluxes = ("landf","radf","turbf","moistEnergyFlx","snow","albedo"); } + } + # == Set 6 + if ( $cn) { + if ( $hydro) { + @set6fluxes = ("landf","radf","turbf","cnFlx","frFlx","crbStock","tsoi","soilliq","soilice", + "soilliqIce","snowliqIce","hydro"); + } + else { @set6fluxes = ("landf","radf","turbf","cnFlx","frFlx","crbStock","tsoi","soilliq","soilice", + "soilliqIce","snowliqIce"); + } + } + else { + if ( $hydro) { + @set6fluxes = ("landf","radf","turbf","tsoi","soilliq","soilice","soilliqIce","snowliqIce","hydro"); + } + else { + @set6fluxes = ("landf","radf","turbf","tsoi","soilliq","soilice","soilliqIce","snowliqIce"); + } + } + @regions = ("HEMISPHERIC AND GLOBAL","POLAR","BOREAL","MIDDLE LATITUDES","TROPICAL RAINFOREST","TROPICAL SAVANNA", + "ARID","HIGHLAND","ASIA"); + @paleo_regions = ("HEMISPHERIC AND GLOBAL"); + + # print set-specific header information + printf fp_wp "\n"; + printf fp_wp "\n All Model Data Regions\n"; + if ($set == 3) { $n2 = "set3_reg_all".$sfx; } + if ($set == 6) { $n2 = 
"set6_reg_all".$sfx; } + printf fp_wp " map\n"; + printf fp_wp "\n\n"; + if ($set == 3) { + printf fp_wp " \n \n GPP\n \n \n"; + printf fp_wp "\n"; + printf fp_wp " \n \n Latent Heat\n \n \n"; + printf fp_wp "\n"; + } + printf fp_wp " \n \n Temp\n \n \n"; + printf fp_wp "\n"; + if ($set == 3) { + # CN active + if ( $cn) { + printf fp_wp " \n \n Precip\n \n \n Carbon \n Energy/Moist\n"; + printf fp_wp "\n"; + printf fp_wp " \n \n Runoff\n Radiative\n Turbulent\n Nitrogen\n"; + printf fp_wp " Fire Control of\nSnow\nALbedo\n"; + printf fp_wp "\n"; + printf fp_wp " \n Map\n SnowDepth\n Fluxes\n Fluxes\n Fluxes\n"; + if ( $hydro) { printf fp_wp " Fluxes\n Evap\n vs Obs\n vs Obs\n Hydrology\n"; } + else { printf fp_wp " Fluxes\n Evap\n vs Obs\n vs Obs\n"; } + printf fp_wp "\n"; + # CN Not Active + } else { + + printf fp_wp " \n \n Precip\n \n \n Energy/moisture\n"; + printf fp_wp "\n"; + printf fp_wp " \n \n Runoff\n Radiative\n Turbulent\n Control of\nSnow\nAlbedo\n"; + printf fp_wp "\n"; + if ( $hydro) { printf fp_wp " \n Map\n SnowDepth\n Fluxes\n Fluxes\n Evap\nvsObs\n vsObs\n Hydrology\n"; } + else { printf fp_wp " \n Map\n SnowDepth\n Fluxes\n Fluxes\n Evap\nvsObs\n vsObs\n \n"; } + printf fp_wp "\n"; + } + } + else + { + if ( $cn) { + printf fp_wp " \n \n Precip\n \n \n Carbon/\n"; + printf fp_wp "\n"; + printf fp_wp " \n \n Runoff\n Radiative\n Turbulent\n Nitrogen\n Fire\n Carbon\n"; + printf fp_wp " Soil\n SoilLiq\n \n TotalSoilIce\n TotalSnowH2O\n"; + printf fp_wp "\n"; + printf fp_wp " \n Map\n SnowDepth\n Fluxes\n Fluxes\n Fluxes\n Fluxes\n Stocks\n"; + if ( $hydro) { + printf fp_wp " Temp\n Water\n SoilIce\n TotalSoilH2O\n TotalSnowIce\n Hydrology\n"; + } else { + printf fp_wp " Temp\n Water\n SoilIce\n TotalSoilH2O\n TotalSnowIce\n"; + } + printf fp_wp "\n"; + } else { + printf fp_wp " \n \n Precip\n \n \n \n"; + printf fp_wp "\n"; + printf fp_wp " \n \n Runoff\n Radiative\n Turbulent\n"; + printf fp_wp " Soil\n SoilLiq\n \n TotalSoilIce\n TotalSnowH2O\n"; 
+ printf fp_wp "\n"; + printf fp_wp " \n Map\n SnowDepth\n Fluxes\n Fluxes\n"; + if ( $hydro) { + printf fp_wp " Temp\n Water\n SoilIce\n TotalSoilH2O\n TotalSnowIce\n Hydrology\n"; + } else { + printf fp_wp " Temp\n Water\n SoilIce\n TotalSoilH2O\n TotalSnowIce\n"; + } + printf fp_wp "\n"; + } + } + + if ($paleo eq 'True') { + @use_region = @paleo_regions; + } + else { + @use_region = @regions; + } + foreach $region (@use_region) + { + undef $nList; + if ($region eq "POLAR") { %nList = %polar; } + if ($region eq "BOREAL") { %nList = %boreal; } + if ($region eq "MIDDLE LATITUDES") { %nList = %midLat; } + if ($region eq "TROPICAL RAINFOREST") { %nList = %trpRF; } + if ($region eq "TROPICAL SAVANNA") { %nList = %trpSav; } + if ($region eq "ARID") { %nList = %arid; } + if ($region eq "HIGHLAND") { %nList = %highlands;} + if ($region eq "ASIA") { %nList = %asia; } + if ($region eq "HEMISPHERIC AND GLOBAL") { %nList = %glHem; } + + &writeRegion(); + } + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +} + diff --git a/lnd_diag/shared/lnd_func.ncl b/lnd_diag/shared/lnd_func.ncl index b9be8ad7..61a91377 100755 --- a/lnd_diag/shared/lnd_func.ncl +++ b/lnd_diag/shared/lnd_func.ncl @@ -1459,6 +1459,196 @@ begin end ;*************************************************************************** +undef("set10Res") +function set10Res(res) +begin + raster = getenv("raster") + paleo = getenv("paleo") + proj_type = getenv("projection") + + res = True + res@cnFillOn = True + res@cnMissingValFillColor = "white" +;; if (raster .eq. 1) then + if (raster .eq. "True") then +;; res@cnRasterModeOn = True + res@cnFillMode = "RasterFill" + end if +;; if (paleo .eq. 1) then + if (paleo .eq. 
"True") then + res@cnFillMode = "CellFill" + end if + res@cnInfoLabelOn = False + res@cnLinesOn = False + res@gsnDraw = False + res@gsnFrame = False + res@lbTitleOn = False + res@tiMainFont = "Helvetica" + res@cnLabelBarEndStyle = "IncludeMinMaxLabels" + res@mpPerimOn = False + res@mpGridLineColor = "transparent" + ; Greenland zoom + + res@mpLimitMode = "LatLon" ; method to zoom + res@mpMaxLatF = 84 + res@mpMinLatF = 59 + res@mpMaxLonF = -25 + res@mpMinLonF = -55 + + res@mpGridAndLimbOn = False + res@mpFillOn = False + + res@mpProjection = "Stereographic" + res@mpCenterLonF = -40. + res@mpCenterLatF = 50. + + res@gsnLeftStringParallelPosF = 0.1 + res@gsnRightStringParallelPosF = 0.9 + res@mpFillOn = False + res@mpOutlineOn = True + res@mpGridAndLimbOn = True + return(res) +end + + +;************************************************************************************************** +undef("set10ResMvM") +function set10ResMvM(res) +begin + raster = getenv("raster") + paleo = getenv("paleo") + proj_type = getenv("projection") + + res = True + res@cnFillOn = True + res@cnMissingValFillColor = "white" +;; if (raster .eq. 1) then + if (raster .eq. "True") then +;; res@cnRasterModeOn = True + res@cnFillMode = "RasterFill" + end if +;; if (paleo .eq. 1) then + if (paleo .eq. "True") then + res@cnFillMode = "CellFill" + end if + ; Greenland zoom + + res@mpLimitMode = "LatLon" ; method to zoom + res@mpMaxLatF = 84 + res@mpMinLatF = 59 + res@mpMaxLonF = -25 + res@mpMinLonF = -55 + + res@mpGridAndLimbOn = False + res@mpFillOn = False + + res@mpProjection = "Stereographic" + res@mpCenterLonF = -40. + res@mpCenterLatF = 50. 
+ + res@cnInfoLabelOn = False + res@cnLinesOn = False + res@gsnDraw = False + res@gsnFrame = False +; res@gsnLeftStringParallelPosF = 0.2 ; Values set in set_2.ncl + res@gsnLeftStringOrthogonalPosF = -1.25 + res@tiMainFont = "Helvetica" + res@cnLabelBarEndStyle = "IncludeMinMaxLabels" + res@mpFillOn = False + res@mpOutlineOn = True + res@mpGridAndLimbOn = True + res@gsnPaperOrientation = "landscape" + return(res) + +end + +;*************************************************************************** +undef("set11Res") +function set11Res(res) +begin + raster = getenv("raster") + paleo = getenv("paleo") + proj_type = getenv("projection") + + res = True + res@cnFillOn = True + res@cnMissingValFillColor = "white" +;; if (raster .eq. 1) then + if (raster .eq. "True") then +;; res@cnRasterModeOn = True + res@cnFillMode = "RasterFill" + end if +;; if (paleo .eq. 1) then + if (paleo .eq. "True") then + res@cnFillMode = "CellFill" + end if + res@cnInfoLabelOn = False + res@cnLinesOn = False + res@gsnDraw = False + res@gsnFrame = False + res@lbTitleOn = False + res@tiMainFont = "Helvetica" + res@cnLabelBarEndStyle = "IncludeMinMaxLabels" + res@mpPerimOn = False + res@mpGridLineColor = "transparent" + ; Antarctica zoom + + res@mpGridAndLimbOn = False + res@mpFillOn = False + res@gsnPolar = "SH" + res@mpMaxLatF = -60. + + res@gsnLeftStringParallelPosF = 0.1 + res@gsnRightStringParallelPosF = 0.9 + res@mpFillOn = False + res@mpOutlineOn = True + res@mpGridAndLimbOn = True + return(res) +end + +;************************************************************************************************** +undef("set11ResMvM") +function set11ResMvM(res) +begin + raster = getenv("raster") + paleo = getenv("paleo") + proj_type = getenv("projection") + + res = True + res@cnFillOn = True + res@cnMissingValFillColor = "white" +;; if (raster .eq. 1) then + if (raster .eq. "True") then +;; res@cnRasterModeOn = True + res@cnFillMode = "RasterFill" + end if +;; if (paleo .eq. 1) then + if (paleo .eq. 
"True") then + res@cnFillMode = "CellFill" + end if + ; Antarctica zoom + + res@mpGridAndLimbOn = False + res@mpFillOn = False + res@gsnPolar = "SH" + res@mpMaxLatF = -60. + + res@cnInfoLabelOn = False + res@cnLinesOn = False + res@gsnDraw = False + res@gsnFrame = False +; res@gsnLeftStringParallelPosF = 0.2 ; Values set in set_2.ncl + res@gsnLeftStringOrthogonalPosF = -1.25 + res@tiMainFont = "Helvetica" + res@cnLabelBarEndStyle = "IncludeMinMaxLabels" + res@mpFillOn = False + res@mpOutlineOn = True + res@mpGridAndLimbOn = True + res@gsnPaperOrientation = "landscape" + return(res) + +end +;************************************************************************************************** undef("getLimits") function getLimits(val) local pCut, pLow, pHigh From 95e484e50819ea9e5db26db3caff5273bd81eb76 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Thu, 27 Apr 2017 17:25:16 -0600 Subject: [PATCH 25/27] update for land-ice diags to correctly generate sets 10 and 11 --- .../lnd/Config/config_diags_lnd.xml | 4 +-- lnd_diag/shared/lnd_lookupTable.pl | 17 +++++++-- ocn_diag/tool_lib/zon_avg/makefile | 36 ------------------- 3 files changed, 16 insertions(+), 41 deletions(-) delete mode 100644 ocn_diag/tool_lib/zon_avg/makefile diff --git a/diagnostics/diagnostics/lnd/Config/config_diags_lnd.xml b/diagnostics/diagnostics/lnd/Config/config_diags_lnd.xml index 9164b457..d91ea782 100644 --- a/diagnostics/diagnostics/lnd/Config/config_diags_lnd.xml +++ b/diagnostics/diagnostics/lnd/Config/config_diags_lnd.xml @@ -830,9 +830,9 @@ Applies to both model and control cases." 
diff --git a/lnd_diag/shared/lnd_lookupTable.pl b/lnd_diag/shared/lnd_lookupTable.pl index de093eb6..bcc6055d 100755 --- a/lnd_diag/shared/lnd_lookupTable.pl +++ b/lnd_diag/shared/lnd_lookupTable.pl @@ -27,6 +27,9 @@ $set_6 = $ENV{'set_6'}; $set_7 = $ENV{'set_7'}; $set_8 = $ENV{'set_8'}; + $set_9 = $ENV{'set_9'}; + $set_10 = $ENV{'set_10'}; + $set_11 = $ENV{'set_11'}; # -------------------------------- # define auxillary files @@ -38,14 +41,14 @@ # -------------------------------- # start main loop # -------------------------------- - @setList = (1,2,3,4,5,6,7,8); - @status = ($set_1, $set_2, $set_3, $set_4, $set_5, $set_6, $set_7, $set_8); + @setList = (1,2,3,4,5,6,7,8,9,10,11); + @status = ($set_1, $set_2, $set_3, $set_4, $set_5, $set_6, $set_7, $set_8, $set_9, $set_10, $set_11); for $set (@setList) { print "set = $set status = @status[$set-1]\n"; - if( @status[$set-1] == "True") { + if( lc(@status[$set-1]) eq "true") { $mainFile = $webdir."/set".$set."/variableList_".$set.".html"; close(fp_main); @@ -231,6 +234,14 @@ sub setDescription { $l = "Line and contour plots of Ocean/Land/Atmosphere CO2 exchange
"; } + if ($set == 10) + { + $l = "Horizontal contour plots of DJF, MAM, JJA, SON, and ANN means, zoomed in on the Greenland ice sheet
"; + } + if ($set == 11) + { + $l = "Horizontal contour plots of DJF, MAM, JJA, SON, and ANN means, zoomed in on the Antarctic ice sheet
"; + } return($l) } diff --git a/ocn_diag/tool_lib/zon_avg/makefile b/ocn_diag/tool_lib/zon_avg/makefile deleted file mode 100644 index 589182d6..00000000 --- a/ocn_diag/tool_lib/zon_avg/makefile +++ /dev/null @@ -1,36 +0,0 @@ - -# FC, INCLUDE, LIBS obtained with command -# nc-config --fc --includedir --flibs - -FC = ifort -FFLAGS = -c -g -O2 -INCLUDE = -I/glade/apps/opt/netcdf/4.2/intel/12.1.5/include -LIBS = -L/glade/apps/opt/netcdf/4.2/intel/12.1.5/lib -lnetcdff -lnetcdf - -.SUFFIXES: # Delete the default suffixes -.SUFFIXES: .F .F90 .o # Define our suffix list - -.F.o: - $(FC) $(FIXED) $(FFLAGS) $(INCLUDE) $< - -.F90.o: - $(FC) $(FREE) $(FFLAGS) $(INCLUDE) $< - -OBJECTS = main.o kinds_mod.o constants.o arg_wrap.o nf_wrap.o nf_wrap_stubs.o msg_mod.o sphere_area_mod.o POP_grid_mod.o zonal_avg_mod.o - -za: $(OBJECTS) - $(FC) $(OBJECTS) $(LIBS) -o za - -kinds_mod.o: kinds_mod.F90 -arg_wrap.o: arg_wrap.F90 kinds_mod.o -constants.o: constants.F90 kinds_mod.o -msg_mod.o: msg_mod.F90 kinds_mod.o -nf_wrap_stubs.o: nf_wrap_stubs.F90 kinds_mod.o -sphere_area_mod.o: sphere_area_mod.F90 kinds_mod.o constants.o -nf_wrap.o: nf_wrap.F90 kinds_mod.o msg_mod.o nf_wrap_stubs.o -POP_grid_mod.o: POP_grid_mod.F90 kinds_mod.o constants.o nf_wrap.o sphere_area_mod.o -zonal_avg_mod.o: zonal_avg_mod.F90 kinds_mod.o constants.o nf_wrap.o POP_grid_mod.o sphere_area_mod.o -main.o: main.F90 kinds_mod.o constants.o msg_mod.o arg_wrap.o sphere_area_mod.o POP_grid_mod.o zonal_avg_mod.o - -clean: - rm -f *.o *.mod *.l *.lis *.lst \ No newline at end of file From 238ee580acd1a94f3a06a3660dc5b59024d8069c Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Mon, 1 May 2017 12:12:24 -0600 Subject: [PATCH 26/27] bug fixes merged from Keith O. 
sandbox --- lnd_diag/model1-model2/set_1.ncl | 3 ++- lnd_diag/model1-model2/set_2.ncl | 7 ++++--- lnd_diag/model1-model2/set_6.ncl | 12 +++++++----- lnd_diag/shared/lnd_create_webpage.pl | 4 ++-- 4 files changed, 15 insertions(+), 11 deletions(-) diff --git a/lnd_diag/model1-model2/set_1.ncl b/lnd_diag/model1-model2/set_1.ncl index 760f56ca..10fa1f46 100755 --- a/lnd_diag/model1-model2/set_1.ncl +++ b/lnd_diag/model1-model2/set_1.ncl @@ -204,7 +204,8 @@ trendsFlag = stringtointeger(getenv("trends_match_Flag")) filevar = vars(i) - if (isfilevar(in,filevar))then + info = var_init(vars(i)) + if (isfilevar(in,filevar) .and. info@derivedVariable .eq. False)then if (c13Flag(i) .eq. "C13_") then x = derive_del13C(vars(i),in,scale_type(i),yrs(m),1) else diff --git a/lnd_diag/model1-model2/set_2.ncl b/lnd_diag/model1-model2/set_2.ncl index 07990c1a..51d381ae 100755 --- a/lnd_diag/model1-model2/set_2.ncl +++ b/lnd_diag/model1-model2/set_2.ncl @@ -240,7 +240,8 @@ begin ; not found in the file, then filevar will be "" plot_check = True - if (filevar .ne. "")then + if (filevar .ne. "" .and. info@derivedVariable .eq. False)then + if (isvar("x1")) then delete(x1) end if @@ -276,7 +277,7 @@ begin ; not found in the file, then filevar will be "" plot_check = True - if (filevar .ne. "")then + if (filevar .ne. "" .and. info@derivedVariable .eq. False)then if (isvar("x2")) then delete(x2) end if @@ -805,7 +806,7 @@ begin ; Note: Don't delete res here - save until after obs are posted. res@tiMainString = cases(1) ; set case 2 titles res@gsnCenterString = " (yrs " + yrs_ave2 + ")" - if (vars(i) .eq. "CH4PROD" .or. vars(i) .eq. "CH4_SURF_EBUL_SAT" .or. vars(i) .eq. "CH4_SURF_EBUL_UNSAT" .or. vars(i) .eq. "CPOOL") then + if (vars(i) .eq. "CH4PROD" .or. vars(i) .eq. "CH4_SURF_EBUL_SAT" .or. vars(i) .eq. "CH4_SURF_EBUL_UNSAT" .or. vars(i) .eq. "CPOOL" .or. vars(i) .eq. "FERT" .or. vars(i) .eq. 
"FERT_TO_SMINN") then if (isatt(res,"cnLabelBarEndStyle")) then if (res@cnLabelBarEndStyle.eq."IncludeMinMaxLabels") then res@cnLabelBarEndStyle = "IncludeOuterBoxes" ; temporarily turn off minmax labels. diff --git a/lnd_diag/model1-model2/set_6.ncl b/lnd_diag/model1-model2/set_6.ncl index 344310fd..36e1b1a2 100755 --- a/lnd_diag/model1-model2/set_6.ncl +++ b/lnd_diag/model1-model2/set_6.ncl @@ -116,7 +116,7 @@ nplots = 12 if (p .eq. 1) then fileName = "set6_radFlx.txt" plot_name = "radf" - titstr = new(7,"string") + titstr = new(8,"string") titstr(0) = "Incoming Solar" titstr(1) = "Albedo" titstr(2) = "Absorbed Solar" @@ -124,6 +124,7 @@ nplots = 12 titstr(4) = "Emitted Longwave" titstr(5) = "Net Longwave" titstr(6) = "Net Radiation" + titstr(7) = "Ground Snow Covered Fraction" end if if (p .eq. 2) then fileName = "set6_turbFlx.txt" @@ -136,7 +137,7 @@ nplots = 12 titstr(4) = "Canopy Evaporation" titstr(5) = "Ground Evaporation" titstr(6) = "Ground Heat + Snow Melt" - titstr(7) = "Soil moisture factor (BTRAN)" + titstr(7) = "Soil moisture factor (BTRAN or BTRANMN)" titstr(8) = "Total LAI" end if if (p .eq. 3) then @@ -162,7 +163,7 @@ nplots = 12 titstr(4) = "Heterotrophic Respiration" titstr(5) = "Ecosystem Respiration" titstr(6) = "Gridcell Surface CH4 Flux" - titstr(7) = "Leached Mineral Nitrogen" + titstr(7) = "Carbon Use Efficiency (NPP/GPP)" titstr(8) = "Soil NO3 Pool Loss to Leaching" titstr(9) = "Soil NO3 Pool Loss to Runoff" else @@ -464,7 +465,8 @@ nplots = 12 ; check for variables in case 1 info = var_init(vars(i)) vcheck1(i) = True - if(isfilevar(in1,vars(i)))then + if(isfilevar(in1,vars(i)) .and. info@derivedVariable .eq. False)then + x1 = in1->$vars(i)$ plot_check = True rank = dimsizes(dimsizes(x1)) @@ -479,7 +481,7 @@ nplots = 12 end if ; check for variables in case 2 vcheck2(i) = True - if(isfilevar(in2,vars(i)))then + if(isfilevar(in2,vars(i)) .and. info@derivedVariable .eq. False)then x2 = in2->$vars(i)$ else if (info@derivedVariable .eq. 
True) then diff --git a/lnd_diag/shared/lnd_create_webpage.pl b/lnd_diag/shared/lnd_create_webpage.pl index 81e06959..84819862 100755 --- a/lnd_diag/shared/lnd_create_webpage.pl +++ b/lnd_diag/shared/lnd_create_webpage.pl @@ -373,11 +373,11 @@ sub setDescription } if ($set == 10) { - $l = "Horizontal contour plots of DJF, MAM, JJA, SON, and ANN means, zoomed in on the Greenland ice sheet (courtesy: Jan Lenaerts, jtmlenaerts@gmail.com)
"; + $l = "Horizontal contour plots of DJF, MAM, JJA, SON, and ANN means, zoomed in on the Greenland ice sheet
"; } if ($set == 11) { - $l = "Horizontal contour plots of DJF, MAM, JJA, SON, and ANN means, zoomed in on the Antarctic ice sheet (courtesy: Jan Lenaerts, jtmlenaerts@gmail.com)
"; + $l = "Horizontal contour plots of DJF, MAM, JJA, SON, and ANN means, zoomed in on the Antarctic ice sheet
"; } if ($set == 12) { From c2ca1d33eff6aea898f5c1d0266a9143c331925b Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Mon, 1 May 2017 15:58:19 -0600 Subject: [PATCH 27/27] add back in ocn za makefile --- ocn_diag/tool_lib/zon_avg/makefile | 36 ++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 ocn_diag/tool_lib/zon_avg/makefile diff --git a/ocn_diag/tool_lib/zon_avg/makefile b/ocn_diag/tool_lib/zon_avg/makefile new file mode 100644 index 00000000..44140c84 --- /dev/null +++ b/ocn_diag/tool_lib/zon_avg/makefile @@ -0,0 +1,36 @@ + +# FC, INCLUDE, LIBS obtained with command +# nc-config --fc --includedir --flibs + +FC = ifort +FFLAGS = -c -g -O2 +INCLUDE = -I/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/16.0.3/include +LIBS = -L/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/16.0.3/lib -lnetcdff -lnetcdf + +.SUFFIXES: # Delete the default suffixes +.SUFFIXES: .F .F90 .o # Define our suffix list + +.F.o: + $(FC) $(FIXED) $(FFLAGS) $(INCLUDE) $< + +.F90.o: + $(FC) $(FREE) $(FFLAGS) $(INCLUDE) $< + +OBJECTS = main.o kinds_mod.o constants.o arg_wrap.o nf_wrap.o nf_wrap_stubs.o msg_mod.o sphere_area_mod.o POP_grid_mod.o zonal_avg_mod.o + +za: $(OBJECTS) + $(FC) $(OBJECTS) $(LIBS) -o za + +kinds_mod.o: kinds_mod.F90 +arg_wrap.o: arg_wrap.F90 kinds_mod.o +constants.o: constants.F90 kinds_mod.o +msg_mod.o: msg_mod.F90 kinds_mod.o +nf_wrap_stubs.o: nf_wrap_stubs.F90 kinds_mod.o +sphere_area_mod.o: sphere_area_mod.F90 kinds_mod.o constants.o +nf_wrap.o: nf_wrap.F90 kinds_mod.o msg_mod.o nf_wrap_stubs.o +POP_grid_mod.o: POP_grid_mod.F90 kinds_mod.o constants.o nf_wrap.o sphere_area_mod.o +zonal_avg_mod.o: zonal_avg_mod.F90 kinds_mod.o constants.o nf_wrap.o POP_grid_mod.o sphere_area_mod.o +main.o: main.F90 kinds_mod.o constants.o msg_mod.o arg_wrap.o sphere_area_mod.o POP_grid_mod.o zonal_avg_mod.o + +clean: + rm -f *.o *.mod *.l *.lis *.lst \ No newline at end of file