diff --git a/.gitignore b/.gitignore
index a953d2aadd..4b998f4dcb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,8 +1,8 @@
# directories checked out by manage_externals, and other files created
# by manage_externals
manage_externals.log
+ccs_config
/src/fates/
-/tools/site_and_regional/PTCLM/
/cime/
/components/
/libraries/
diff --git a/Externals.cfg b/Externals.cfg
index 1c2293d02f..f596a4bd4f 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -8,7 +8,7 @@ required = True
local_path = components/cism
protocol = git
repo_url = https://github.com/ESCOMP/CISM-wrapper
-tag = cismwrap_2_1_93
+tag = cismwrap_2_1_95
externals = Externals_CISM.cfg
required = True
@@ -33,22 +33,29 @@ repo_url = https://github.com/nmizukami/mizuRoute
hash = 34723c2
required = True
+[ccs_config]
+tag = ccs_config_cesm0.0.5
+protocol = git
+repo_url = https://github.com/ESMCI/ccs_config_cesm.git
+local_path = ccs_config
+required = True
+
[cime]
local_path = cime
protocol = git
repo_url = https://github.com/ESMCI/cime
-tag = branch_tags/cime6.0.12_a01
+tag = cime6.0.13
required = True
[cmeps]
-tag = cmeps0.13.43
+tag = cmeps0.13.47
protocol = git
repo_url = https://github.com/ESCOMP/CMEPS.git
local_path = components/cmeps
required = True
[cdeps]
-tag = cdeps0.12.34
+tag = cdeps0.12.35
protocol = git
repo_url = https://github.com/ESCOMP/CDEPS.git
local_path = components/cdeps
@@ -56,7 +63,7 @@ externals = Externals_CDEPS.cfg
required = True
[cpl7]
-tag = cpl7.0.7
+tag = cpl7.0.12
protocol = git
repo_url = https://github.com/ESCOMP/CESM_CPL7andDataComps
local_path = components/cpl7
@@ -77,7 +84,7 @@ local_path = libraries/mct
required = True
[parallelio]
-tag = pio2_5_4
+tag = pio2_5_5
protocol = git
repo_url = https://github.com/NCAR/ParallelIO
local_path = libraries/parallelio
diff --git a/Externals_CLM.cfg b/Externals_CLM.cfg
index f2916c8bf2..e9c459f094 100644
--- a/Externals_CLM.cfg
+++ b/Externals_CLM.cfg
@@ -2,7 +2,7 @@
local_path = src/fates
protocol = git
repo_url = https://github.com/NGEET/fates
-tag = sci.1.53.0_api.21.0.0
+tag = sci.1.54.0_api.22.0.0
required = True
[externals_description]
diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm
index 30809918ee..b074f43b13 100755
--- a/bld/CLMBuildNamelist.pm
+++ b/bld/CLMBuildNamelist.pm
@@ -2269,7 +2269,14 @@ sub setup_logic_surface_dataset {
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var,
'hgrid'=>$nl_flags->{'res'}, 'ssp_rcp'=>$nl_flags->{'ssp_rcp'}, 'use_vichydro'=>$nl_flags->{'use_vichydro'},
'sim_year'=>$nl_flags->{'sim_year'}, 'irrigate'=>$nl_flags->{'irrigate'},
- 'use_crop'=>$nl_flags->{'use_crop'}, 'glc_nec'=>$nl_flags->{'glc_nec'});
+ 'use_crop'=>$nl_flags->{'use_crop'}, 'glc_nec'=>$nl_flags->{'glc_nec'}, 'nofail'=>1 );
+ if ( ! defined($nl->get_value($var) ) ) {
+ $log->verbose_message( "Exact match of $var NOT found, searching for version with irrigate true" );
+ }
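+      # If no exact match was found, fall back to a surface dataset with irrigate = .true.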
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var,
+ 'hgrid'=>$nl_flags->{'res'}, 'ssp_rcp'=>$nl_flags->{'ssp_rcp'}, 'use_vichydro'=>$nl_flags->{'use_vichydro'},
+ 'sim_year'=>$nl_flags->{'sim_year'}, 'irrigate'=>".true.",
+ 'use_crop'=>$nl_flags->{'use_crop'}, 'glc_nec'=>$nl_flags->{'glc_nec'} );
}
}
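The hunk above implements a two-pass lookup: the first add_default call searches for an
exact fsurdat match without failing, and if nothing is found, the second call retries with
irrigate forced to ".true.". A minimal Python sketch of the same control flow (find_default
is a hypothetical stand-in for add_default's XML search, not part of the patch):

    def choose_surface_dataset(find_default, attrs):
        # Pass 1: exact attribute match; with nofail, a miss returns None
        fsurdat = find_default(attrs, nofail=True)
        if fsurdat is None:
            # Pass 2: retry with irrigate forced to .true.; a miss here may abort
            fsurdat = find_default({**attrs, "irrigate": ".true."})
        return fsurdat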
diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml
index a9535ffd33..079ce80b56 100644
--- a/bld/namelist_files/namelist_defaults_ctsm.xml
+++ b/bld/namelist_files/namelist_defaults_ctsm.xml
@@ -123,14 +123,13 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/isotopes/atm_delta_C14_CMIP6_SSP5B_3x1_global_1850-2100_yearly_c181209.nc
-.true.
-.false.
-.true.
-.false.
-.false.
+.true.
+.false.
+.true.
+.false.
+.false.
-.false.
-.true.
+.false.
@@ -1487,7 +1486,7 @@ lnd/clm2/surfdata_map/release-clm5.0.30/surfdata_ne0np4.CONUS.ne30x8_hist_78pfts
lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP2-4.5-WACCM_1849-2101_monthly_c191007.nc
lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP3-7.0-WACCM_1849-2101_monthly_c191007.nc
+>lnd/clm2/ndepdata/fndep_clm_SSP370_b.e21.BWSSP370cmip6.f09_g17.CMIP6-SSP3-7.0-WACCM.002_1849-2101_monthly_0.9x1.25_c211216.nc
lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP5-8.5-WACCM_1849-2101_monthly_c191007.nc
@@ -1496,16 +1495,16 @@ lnd/clm2/surfdata_map/release-clm5.0.30/surfdata_ne0np4.CONUS.ne30x8_hist_78pfts
lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP2-4.5-WACCM_1849-2101_monthly_c191007.nc
lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP3-7.0-WACCM_1849-2101_monthly_c191007.nc
+>lnd/clm2/ndepdata/fndep_clm_SSP370_b.e21.BWSSP370cmip6.f09_g17.CMIP6-SSP3-7.0-WACCM.002_1849-2101_monthly_0.9x1.25_c211216.nc
lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP5-8.5-WACCM_1849-2101_monthly_c191007.nc
lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP1-2.6-WACCM_1849-2101_monthly_c191007.nc
lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP5-8.5-WACCM_1849-2101_monthly_c191007.nc
+>lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP2-4.5-WACCM_1849-2101_monthly_c191007.nc
lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP3-7.0-WACCM_1849-2101_monthly_c191007.nc
+>lnd/clm2/ndepdata/fndep_clm_SSP370_b.e21.BWSSP370cmip6.f09_g17.CMIP6-SSP3-7.0-WACCM.002_1849-2101_monthly_0.9x1.25_c211216.nc
cycle
NDEP_month
diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml
index f6637cd964..4e33b2cd48 100644
--- a/cime_config/config_component.xml
+++ b/cime_config/config_component.xml
@@ -243,6 +243,18 @@
This is an advanced flag and should only be used by expert users.
+
+ char
+ UNSET
+ run_component_ctsm
+ env_run.xml
+ Directory name for user-created surface, landuse, and datm datasets.
+ This is used as an argument in user_mods namelists (e.g. user_nl_clm,
+ user_nl_datm_streams) generated with the subset_data script. Users
+ should modify this variable (in shell_commands or elsewhere) to set the
+ location of user-created data. The default value is UNSET.
+
+
char
on,off
@@ -271,19 +283,19 @@
User mods to apply to specific compset matches.
-
+
char
-
+
ABBY,BLAN,CPER,DEJU,GRSM,HEAL,KONA,LENO,NIWO,ONAQ,PUUM,SERC,SRER,TALL,TREE,WOOD,
BARR,BONA,DCFS,DELA,GUAN,JERC,KONZ,MLBS,NOGP,ORNL,RMNP,SJER,STEI,TEAK,UKFS,WREF,
BART,CLBJ,DSNY,HARV,JORN,LAJA,MOAB,OAES,OSBS,SCBI,SOAP,STER,TOOL,UNDE,YELL
-
-
- run_component_ctsm
- env_run.xml
- Name of site for NEON tower data
-
-
+
+
+ run_component_ctsm
+ env_run.xml
+ Name of site for NEON tower data
+
+
=========================================
CLM naming conventions
diff --git a/cime_config/config_compsets.xml b/cime_config/config_compsets.xml
index aec030b47e..187f359c11 100644
--- a/cime_config/config_compsets.xml
+++ b/cime_config/config_compsets.xml
@@ -229,6 +229,10 @@
I2000Clm50FatesRs
2000_DATM%GSWP3v1_CLM50%FATES_SICE_SOCN_SROF_SGLC_SWAV
+
+ I2000Clm51FatesRs
+ 2000_DATM%GSWP3v1_CLM51%FATES_SICE_SOCN_SROF_SGLC_SWAV
+
I1850Clm50Bgc
diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml
index 34b4c7363d..9c32bd09d1 100644
--- a/cime_config/testdefs/testlist_clm.xml
+++ b/cime_config/testdefs/testlist_clm.xml
@@ -542,7 +542,7 @@
-
+
@@ -1124,14 +1124,6 @@
-
-
-
-
-
-
-
-
@@ -1143,7 +1135,7 @@
-
+
@@ -1158,26 +1150,9 @@
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
@@ -1186,39 +1161,39 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -1232,7 +1207,7 @@
-
+
@@ -1277,7 +1252,7 @@
-
+
@@ -1287,7 +1262,7 @@
-
+
@@ -1496,27 +1471,10 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
-
+
@@ -1525,11 +1483,11 @@
-
+
+
-
@@ -1537,8 +1495,8 @@
-
-
+
+
@@ -1637,14 +1595,15 @@
-
+
-
-
+
+
+
@@ -1689,7 +1648,7 @@
-
+
@@ -1750,34 +1709,9 @@
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
@@ -1893,8 +1827,7 @@
-
-
+
@@ -1902,7 +1835,7 @@
-
+
@@ -1945,9 +1878,7 @@
-
-
@@ -1984,7 +1915,6 @@
-
@@ -1995,6 +1925,7 @@
+
@@ -2018,16 +1949,7 @@
-
-
-
-
-
-
-
-
-
-
+
@@ -2291,7 +2213,7 @@
-
+
@@ -2299,12 +2221,12 @@
-
+
-
+
@@ -2493,12 +2415,17 @@
-
+
+
+
+
+
+
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/README b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/README
deleted file mode 100644
index 299d5cf468..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/README
+++ /dev/null
@@ -1,2 +0,0 @@
-This test mod outputs an optional text file containing a table of the
-history fields master list.
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/README b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/README
new file mode 100644
index 0000000000..0af53362c4
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/README
@@ -0,0 +1 @@
+Test FATES with methane turned off (methane is on by default)
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/include_user_mods
similarity index 100%
rename from cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/include_user_mods
rename to cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/include_user_mods
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/user_nl_clm
similarity index 62%
rename from cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/user_nl_clm
rename to cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/user_nl_clm
index b01aafeef5..4d7617fed4 100644
--- a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/user_nl_clm
@@ -1,2 +1,2 @@
-use_lch4 = .true.
+use_lch4 = .false.
hist_master_list_file = .true.
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefHydro/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefHydro/user_nl_clm
index 33c91d796a..f0bdb388eb 100644
--- a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefHydro/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefHydro/user_nl_clm
@@ -3,10 +3,10 @@ hist_nhtfrq = -24
hist_empty_htapes = .true.
use_fates_planthydro= .true.
hist_fincl1 = 'FATES_ERRH2O_SZPF', 'FATES_TRAN_SZPF',
-'FATES_SAPFLOW_SZPF', 'FATES_ITERH1_SZPF', 'FATES_ROOTH2O_ABS_SZPF',
-'FATES_ROOTH2O_TRANS_SZPF', 'FATES_STEMH2O_SZPF', 'FATES_LEAFH2O_SZPF',
-'FATES_ROOTH2O_POT_SZPF', 'FATES_BTRAN_SZPF', 'FATES_ROOTWGT_SOILVWC',
-'FATES_ROOTWGT_SOILVWCSAT', 'FATES_ROOTWGT_SOILMATPOT', 'FATES_SOILMATPOT_SL',
-'FATES_SOILVWC_SL', 'FATES_SOILVWCSAT_SL', 'FATES_ROOTUPTAKE',
-'FATES_ROOTUPTAKE_SL', 'FATES_ROOTUPTAKE0_SZPF', 'FATES_ROOTUPTAKE10_SZPF',
-'FATES_ROOTUPTAKE50_SZPF', 'FATES_ROOTUPTAKE100_SZPF'
+'FATES_SAPFLOW_SZPF', 'FATES_ITERH1_SZPF','FATES_ABSROOT_H2O_SZPF',
+'FATES_TRANSROOT_H2O_SZPF','FATES_STEM_H2O_SZPF','FATES_LEAF_H2O_SZPF',
+'FATES_ABSROOT_H2OPOT_SZPF','FATES_BTRAN_SZPF','FATES_ROOTWGT_SOILVWC',
+'FATES_ROOTWGT_SOILVWCSAT','FATES_ROOTWGT_SOILMATPOT','FATES_SOILMATPOT_SL',
+'FATES_SOILVWC_SL','FATES_SOILVWCSAT_SL','FATES_ROOTUPTAKE',
+'FATES_ROOTUPTAKE_SL','FATES_ROOTUPTAKE0_SZPF','FATES_ROOTUPTAKE10_SZPF',
+'FATES_ROOTUPTAKE50_SZPF','FATES_ROOTUPTAKE100_SZPF'
diff --git a/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/include_user_mods
new file mode 100644
index 0000000000..02ec13743f
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/include_user_mods
@@ -0,0 +1 @@
+../cropMonthOutput
diff --git a/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/shell_commands b/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/shell_commands
new file mode 100755
index 0000000000..2a9f09bd75
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/shell_commands
@@ -0,0 +1 @@
+./xmlchange CLM_FORCE_COLDSTART="on"
diff --git a/cime_config/testdefs/testmods_dirs/clm/ptsRLB/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/ptsRLB/include_user_mods
deleted file mode 100644
index cdb9d9f000..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/ptsRLB/include_user_mods
+++ /dev/null
@@ -1 +0,0 @@
-../pts
diff --git a/cime_config/testdefs/testmods_dirs/clm/ptsRLB/shell_commands b/cime_config/testdefs/testmods_dirs/clm/ptsRLB/shell_commands
deleted file mode 100644
index 15fd1cced4..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/ptsRLB/shell_commands
+++ /dev/null
@@ -1,2 +0,0 @@
-./xmlchange PTS_LAT=-5,PTS_LON=290
-./xmlchange --force CLM_FORCE_COLDSTART=on
diff --git a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl
index a70d5e9641..15ec0469be 100644
--- a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl
+++ b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl
@@ -1,7 +1,9 @@
; NCL script
; modify_smallville_with_dynurban.ncl
; Keith Oleson, Dec 2021
-; Purpose is to create a dynamic urban file for the smallville grid for test
+; Feb 23, 2022: Change HASURBAN to PCT_URBAN_MAX. The output file date has been updated from
+; c211206 to c220223.
+; Purpose is to create a transient landuse file for the smallville grid for dynamic urban testing
; ERS_Lm25.1x1_smallvilleIA.IHistClm50BgcCropQianRs.cheyenne_gnu.clm-smallville_dynurban_monthly
;**************************************
@@ -17,7 +19,7 @@ begin
print ("=========================================")
infile = "/glade/p/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc"
- outfile = "/glade/p/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c211206.nc"
+ outfile = "/glade/p/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c220223.nc"
system("cp " + infile + " " + outfile)
@@ -37,24 +39,29 @@ begin
pct_urban@units = "unitless"
printVarSummary(pct_urban)
- hasurban = new((/numurbl,dimsizes(pct_crop(0,:,0)),dimsizes(pct_crop(0,0,:))/),double,"No_FillValue")
- hasurban!0 = pct_urban!1
- hasurban!1 = pct_urban!2
- hasurban!2 = pct_urban!3
- hasurban = 1.d
- printVarSummary(hasurban)
-
pct_urban(:,0,0,0) = (/0.d,20.d,10.d,10.d,10.d,10.d/)
pct_urban(:,1,0,0) = (/0.d,15.d, 8.d, 8.d, 8.d, 8.d/)
- pct_urban(:,2,0,0) = (/0.d,10.d, 5.d, 5.d, 5.d, 5.d/)
+;pct_urban(:,2,0,0) = (/0.d,10.d, 5.d, 5.d, 5.d, 5.d/)
+ pct_urban(:,2,0,0) = (/0.d, 0.d, 0.d, 0.d, 0.d, 0.d/)
+
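+; PCT_URBAN_MAX: maximum urban percentage over the time series for each density class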
+ pct_urban_max = new((/numurbl,dimsizes(pct_crop(0,:,0)),dimsizes(pct_crop(0,0,:))/),double,"No_FillValue")
+ pct_urban_max!0 = pct_urban!1
+ pct_urban_max!1 = pct_urban!2
+ pct_urban_max!2 = pct_urban!3
+ pct_urban_max(0,:,:) = max(pct_urban(:,0,0,0))
+ pct_urban_max(1,:,:) = max(pct_urban(:,1,0,0))
+ pct_urban_max(2,:,:) = max(pct_urban(:,2,0,0))
+ printVarSummary(pct_urban_max)
+ pct_urban_max@units = "unitless"
+ pct_urban_max@long_name = "maximum percent urban for each density type (tbd, hd, md)"
pct_crop(:,0,0) = (/0.,25.,12.,12.,12.,12./)
- outf->HASURBAN = hasurban
+ outf->PCT_URBAN_MAX = pct_urban_max
outf->PCT_URBAN = pct_urban
outf->PCT_CROP = pct_crop
- outf@history = "This file was created with the following NCL script: /glade/p/cgd/tss/people/oleson/modify_surfdata/modify_smallville_with_dynurban.ncl. The file used as a template is: /glade/p/cesm/cseg/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc. Key points are that urban area starts as 0, increases after the first year, then decreases after the second year. PCT_CROP is also changed so that PCT_URBAN + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_URBAN in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.). Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid."
+ outf@history = "This file was created with the following NCL script: /glade/p/cgd/tss/people/oleson/modify_surfdata/modify_smallville_with_dynurban.ncl. The file used as a template is: /glade/p/cesm/cseg/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc. Key points are that urban area starts as 0, increases after the first year, then decreases after the second year. Medium density urban is set to zero to test the memory-saving behavior of PCT_URBAN_MAX. PCT_CROP is also changed so that PCT_URBAN + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_URBAN in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.). Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid."
print ("=========================================")
print ("Finish Time: "+systemfunc("date") )
diff --git a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm
index ebda6ab408..69a78ee17d 100644
--- a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm
@@ -1,12 +1,13 @@
do_transient_urban = .true.
-!KO The following run_zero_weight_urban setting is temporary until the HASURBAN methdology is implemented.
-run_zero_weight_urban = .true.
-! This file was created with the following NCL script:
+! The flanduse_timeseries file was created with the following NCL script (a copy of this script is in cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly):
! /glade/p/cgd/tss/people/oleson/modify_surfdata/modify_smallville_with_dynurban.ncl
! The file used as a template is:
! /glade/p/cesm/cseg/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc
! Key points are that urban area starts as 0, increases after the first year, then decreases after the second year.
+! Medium density urban is set to zero to test the memory-saving behavior of PCT_URBAN_MAX.
! PCT_CROP is also changed so that PCT_URBAN + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_URBAN in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.)
! Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid.
-flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c211206.nc'
+! Feb 23, 2022: Use updated file with HASURBAN replaced by PCT_URBAN_MAX
+flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c220223.nc'
diff --git a/doc/ChangeLog b/doc/ChangeLog
index a93c31092f..9661e9574f 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,4 +1,777 @@
===============================================================
+Tag name: ctsm5.1.dev083
+Originator(s): fang-bowen (Bowen Fang) / oleson (Keith Oleson,UCAR/TSS,303-497-1332)
+ / Face2sea (Lei Zhao) / keerzhang1 (Keer Zhang) / sacks (Bill Sacks)
+Date: Tue Mar 8 14:12:00 MST 2022
+One-line Summary: Implement PCT_URBAN_MAX to minimize dynamic urban memory
+
+Purpose and description of changes
+----------------------------------
+
+Read in 'PCT_URBAN_MAX' from the landuse timeseries file (maximum urban percentage throughout
+timeseries) and initialize urban landunits in memory only where PCT_URBAN_MAX is greater than zero.
+
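+For reference, a minimal numpy sketch of the idea (values mirror the smallville
+test file; names are illustrative, not the Fortran implementation):
+
+    import numpy as np
+
+    # pct_urban: (time, numurbl, lat, lon), as on the landuse timeseries file
+    pct_urban = np.zeros((6, 3, 1, 1))
+    pct_urban[:, 0, 0, 0] = [0., 20., 10., 10., 10., 10.]
+    pct_urban[:, 1, 0, 0] = [0., 15., 8., 8., 8., 8.]
+
+    # PCT_URBAN_MAX: maximum urban percentage over the time series
+    pct_urban_max = pct_urban.max(axis=0)
+
+    # Urban landunits need memory only where the maximum is ever nonzero
+    needs_urban_memory = pct_urban_max > 0.
+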
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed or introduced
+------------------------
+
+Issues fixed (include CTSM Issue #):
+- Resolves ESCOMP/CTSM#1572 (Improve mechanism for determining where to
+ allocate memory for urban when running with dynamic urban)
+
+Notes of particular relevance for developers:
+---------------------------------------------
+NOTE: Be sure to review the steps in README.CHECKLIST.master_tags as well as the coding style in the Developers Guide
+
+Changes to tests or testing:
+ERS_Lm25.1x1_smallvilleIA.IHistClm50BgcCropQianRs.cheyenne_gnu.clm-smallville_dynurban_monthly has
+been updated to accommodate PCT_URBAN_MAX
+
+Testing summary:
+----------------
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ cheyenne ---- PASS
+ izumi ------- PASS
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: NO
+
+Other details
+-------------
+
+Pull Requests that document the changes (include PR ids):
+https://github.com/ESCOMP/ctsm/pull/1661
+
+===============================================================
+===============================================================
+Tag name: ctsm5.1.dev082
+Originator(s): slevis (Samuel Levis,SLevis Consulting,303-665-1310)
+Date: Mon Feb 28 10:12:16 MST 2022
+One-line Summary: Replace dom_nat_pft with dom_plant to enable crop in fsurdat_modifier tool
+
+Purpose and description of changes
+----------------------------------
+
+ Allow user to replace vegetation in fsurdat files with any pft/cft using the
+ fsurdat_modifier tool option dom_plant. This option replaces the now-obsolete
+ option dom_nat_pft, which handled pfts only and not cfts.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Notes of particular relevance for users
+---------------------------------------
+Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables):
+ Instead of dom_nat_pft = UNSET, modify_template.cfg now includes the line
+ dom_plant = UNSET to allow users to set the pft/cft of their choice to replace
+ the existing vegetation.
+
+Changes to the datasets (e.g., parameter, surface or initial files):
+ A new system test checks the new code by comparing a generated file to a
+ baseline file. I added the baseline file to this PR:
+ .../python/ctsm/test/testinputs/surfdata_5x5_amazon_16pfts_Irrig_CMIP6_simyr2000_c171214_modified_with_crop.nc
+
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+ Added a system test to test_sys_fsurdat_modifier.py to run with the new option
+ dom_plant set to 15 (i.e. a crop).
+
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ (any machine) - cheyenne PASS
+
+ clm_pymods test suite on cheyenne - PASS
+
+ any other testing (give details below):
+
+
+Answer changes
+--------------
+Changes answers relative to baseline: NO
+
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/1615
+
+===============================================================
+===============================================================
+Tag name: ctsm5.1.dev081
+Originator(s): swensosc (Sean Swenson)
+Date: Thu Feb 24 21:33:35 MST 2022
+One-line Summary: Do not subtract irrigation from QRUNOFF diagnostic
+
+Purpose and description of changes
+----------------------------------
+
+Remove code that subtracts surface irrigation flux from qflx_runoff_col.
+This is a diagnostic change only.
+
+This subtraction no longer makes sense now that irrigation is passed as
+a separate flux to the ROF model.
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Testing summary:
+----------------
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ cheyenne ---- OK
+ izumi ------- OK
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: YES, but just in a few diagnostic fields
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: Crop cases with irrigation
+ - what platforms/compilers: All
+ - nature of change (roundoff; larger than roundoff/same climate; new climate):
+ Larger than roundoff, but only impacts a few diagnostic fields:
+ QRUNOFF, QRUNOFF_R, QRUNOFF_TO_COUPLER
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+https://github.com/ESCOMP/CTSM/pull/1641
+
+===============================================================
+===============================================================
+Tag name: ctsm5.1.dev080
+Originator(s): sacks (Bill Sacks)
+Date: Thu Feb 24 15:26:02 MST 2022
+One-line Summary: Use avg days per year when converting param units
+
+Purpose and description of changes
+----------------------------------
+
+When converting parameter units from per-year to per-second, use average
+days per year instead of current number of days per year. This is
+relevant when running with a Gregorian calendar.
+
+See https://github.com/ESCOMP/CTSM/issues/1612 for details.
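+
+As a worked example of the conversion (a sketch; the function name is illustrative):
+
+    def per_year_to_per_sec(rate_per_year):
+        # Gregorian average year length (97 leap years per 400 years),
+        # rather than the current year's 365 or 366 days
+        avg_days_per_year = 365.2425
+        return rate_per_year / (avg_days_per_year * 86400.0)
+
+The relative difference from using 365 days is under 1/365, consistent with
+the answer-changes note below.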
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed or introduced
+------------------------
+Issues fixed (include CTSM Issue #):
+- Resolves ESCOMP/CTSM#1612 (Some uses of get_days_per_year should use
+ the average number of days in a year, not the number of days in the
+ current year)
+
+Known bugs introduced in this tag (include issue #):
+- ESCOMP/CTSM#1624 (Change get_average_days_per_year to use
+ ESMF_CalendarGet)
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Caveats for developers (e.g., code that is duplicated that requires double maintenance):
+- Once ESMF supports it, we should change get_average_days_per_year to
+ use ESMF_CalendarGet (see ESCOMP/CTSM#1624)
+
+Testing summary:
+----------------
+
+ cheyenne ---- OK
+ izumi ------- OK
+
+Test
+ERS_Ly3_P72x2_Vmct.f10_f10_mg37.IHistClm50BgcCropG.cheyenne_intel.clm-cropMonthOutput
+initially failed COMPARE_base_rest and BASELINE comparisons; rerunning
+solved the issue.
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: YES
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: Gregorian cases with BGC
+ - what platforms/compilers: all
+ - nature of change (roundoff; larger than roundoff/same climate; new climate):
+ larger than roundoff / same climate
+
+ Changes a few BGC-related parameters by a small amount (< 1/365 in
+ a relative sense) for Gregorian cases
+
+ Changes answers for these tests:
+ - SMS_Ly5_Mmpi-serial.1x1_smallvilleIA.IHistClm50BgcCropQianRs.izumi_gnu.clm-gregorian_cropMonthOutput
+ - DAE_C2_D_Lh12.f10_f10_mg37.I2000Clm50BgcCrop.cheyenne_intel.clm-DA_multidrv
+ - DAE_N2_D_Lh12_Vmct.f10_f10_mg37.I2000Clm50BgcCrop.cheyenne_intel.clm-DA_multidrv
+
+ In principle, might change answers by roundoff for NOLEAP BGC
+ tests on some machines / compilers, but that wasn't seen in any
+ aux_clm testing.
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+https://github.com/ESCOMP/CTSM/pull/1625
+
+===============================================================
+===============================================================
+Tag name: ctsm5.1.dev079
+Originator(s): sacks (Bill Sacks)
+Date: Thu Feb 24 14:40:58 MST 2022
+One-line Summary: Changes to CropPhenology timing
+
+Purpose and description of changes
+----------------------------------
+
+Changes to CropPhenology timing to support
+https://github.com/escomp/ctsm/pull/1616 and other work being done by
+Sam Rabin:
+
+(1) Change CropPhenology to look at the time as of the START of the time
+ step. Previously, CropPhenology looked at time as of the END of the
+ time step. This was somewhat problematic, particularly because it
+ meant that the last time step of the year was considered Jan 1, and
+ so crops with a planting window beginning Jan 1 could be planted at
+ the end of the previous year rather than the start of the new year.
+ This was becoming particularly problematic in the context of Sam
+ Rabin's upcoming prescribed sowing date work (see #1623 and #1616
+    for some discussion). A small timing sketch is given after item (2) below.
+
+(2) Call CropPhenology regardless of doalb (however, still do not call
+ CropPhenology on time step 0). (See some discussion in #1626 and
+ #1623.)
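+
+A small sketch of the timing change in (1), using an 1800 s time step and a
+365-day year (illustrative Python, not the model code):
+
+    dtime = 1800
+    secs = 365 * 86400                                          # end of the year's last step
+    doy_end = (secs % (365 * 86400)) // 86400 + 1               # = 1   ("Jan 1", old behavior)
+    doy_start = ((secs - dtime) % (365 * 86400)) // 86400 + 1   # = 365 (Dec 31, new behavior)
+
+With end-of-step time, the year's last step reported Jan 1, so a planting
+window opening Jan 1 could trigger at the end of the previous year.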
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed or introduced
+------------------------
+[Remove any lines that don't apply. Remove entire section if nothing applies.]
+
+Issues fixed (include CTSM Issue #):
+- Resolves ESCOMP/CTSM#1623 (Change CropPhenology to look at the day of
+ year as of the start of the time step, not the end of the time step)
+
+Testing summary:
+----------------
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ cheyenne ---- OK
+ izumi ------- OK
+
+Also ran
+ERS_Ld3_D.f09_g17.I1850Clm50BgcCrop.cheyenne_intel.clm-rad_hrly_light_res_half
+(a debug version of the one test in the test suite that uses hourly
+doalb)
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: YES
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: Crop cases
+ - what platforms/compilers: All
+ - nature of change (roundoff; larger than roundoff/same climate; new climate):
+ Not investigated carefully, but expected to be larger than
+ roundoff/same climate
+
+ Based on an analysis of the code, the differences should be small,
+ but I haven't investigated this with simulations. In principle,
+ there could be larger answer changes for crops that invoke
+ vernalization in cases where doalb isn't true every time step, but
+ currently I think we don't simulate any crops that invoke
+ vernalization. It's also possible that there are other larger
+ changes in cases where doalb isn't true every time step (i.e., F
+ and B compsets), though from reading through the code, I don't see
+ any potentials for big changes.
+
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+https://github.com/ESCOMP/CTSM/pull/1628
+
+===============================================================
+===============================================================
+Tag name: ctsm5.1.dev078
+Originator(s): sacks (Bill Sacks)
+Date: Thu Feb 24 14:15:46 MST 2022
+One-line Summary: Rework single-point testing
+
+Purpose and description of changes
+----------------------------------
+
+Two changes to make the single-point tests in our test suite run more
+smoothly. The main motivation for this is that single-point tests have
+been failing frequently on cheyenne, presumably due to issues with nodes
+in the share queue.
+
+(1) Rework the test list to remove redundant single-point tests and move
+ many of our single-point tests from cheyenne to izumi. (With (2) in
+ place, this isn't really necessary, but given that we're going to be
+ using full nodes for single-point tests, it seems better to do this
+ on izumi rather than wasting all of these processors on the
+ generally more heavily-loaded cheyenne. Also, this helps accomplish
+ the goal of providing better balance between our different
+ machine-compiler permutations, helping to reduce overall testing
+ time.)
+
+(2) Change run_sys_tests so that, for the remaining single-point tests
+ on cheyenne, we use the regular queue rather than the share queue.
+ This means that single-point tests use a full node, which is
+ wasteful but makes the tests more reliable. (You can override this
+ behavior with the --queue option to run_sys_tests: To revert to the
+ earlier behavior, where the queue is determined automatically for
+ each test, add '--queue unspecified' to your run_sys_tests
+ invocation. (As before --queue can also be used to specify some
+ other queue to use for all tests.))
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed or introduced
+------------------------
+Issues fixed (include CTSM Issue #):
+- Partially addresses ESCOMP/CTSM#275
+
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+- Overhauls single-point tests. See
+ https://github.com/ESCOMP/CTSM/pull/1660#issuecomment-1049489430 for details.
+
+
+Testing summary:
+----------------
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ cheyenne - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ cheyenne ---- PASS
+ izumi ------- PASS
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: NO
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+https://github.com/ESCOMP/CTSM/pull/1660
+
+===============================================================
+===============================================================
+Tag name: ctsm5.1.dev077
+Originator(s): rgknox (Ryan Knox,,,)
+Date: Tue Feb 22 12:51:04 EST 2022
+One-line Summary: Updates to the FATES API, including removal of patch dimensions and use of soil instead of ground layers in FATES history
+
+Purpose and description of changes
+----------------------------------
+
+This set of changes cleaned up some aspects of the FATES history diagnostics API. Patches are not used in output diagnostics, so they were removed from the history coupling. Also, FATES only "sees" the soil, not the rock layers, so it is more appropriate to align FATES below-ground history output on the soil layers rather than on the ground layers. The corresponding FATES-side changes also rename some history output variables. This is an API change as well, and is compatible with FATES: https://github.com/NGEET/fates/releases/tag/sci.1.54.0_api.22.0.0
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+[X] clm5_1-FATES-hydro
+
+
+Notes of particular relevance for users
+---------------------------------------
+
+ FATES users should now benefit from more interpretable history output for soil-dimensioned variables: they no longer have to filter out ground layers that previously contained no-data flags. Also, note the following FATES history variable name changes:
+
+ FATES_ROOTH2O_ABS_SZPF -> FATES_ABSROOT_H2O_SZPF
+ FATES_ROOTH2O_TRANS_SZPF -> FATES_TRANSROOT_H2O_SZPF
+ FATES_STEMH2O_SZPF -> FATES_STEM_H2O_SZPF
+ FATES_LEAFH2O_SZPF -> FATES_LEAF_H2O_SZPF
+ FATES_ROOTH2O_POT_SZPF -> FATES_ABSROOT_H2OPOT_SZPF
+ FATES_ROOTH2O_TRANSPOT_SZPF -> FATES_TRANSROOT_H2OPOT_SZPF
+ FATES_STEMH2O_POT_SZPF -> FATES_STEM_H2OPOT_SZPF
+ FATES_LEAFH2O_POT_SZPF -> FATES_LEAF_H2OPOT_SZPF
+ FATES_ROOT_ABSFRAC_SZPF -> FATES_ABSROOT_CONDFRAC_SZPF
+ FATES_ROOT_TRANSFRAC_SZPF -> FATES_TRANSROOT_CONDFRAC_SZPF
+ FATES_STEMH2O_FRAC_SZPF -> FATES_STEM_CONDFRAC_SZPF
+ FATES_LEAFH2O_FRAC_SZPF -> FATES_LEAF_CONDFRAC_SZPF
+
+Caveats for users (e.g., need to interpolate initial conditions): FATES users may have scripts that assume below ground diagnostics are on the ground dimension, and will have to update scripts to use the soil dimension.
+
+Changes made to namelist defaults (e.g., changed parameter values): No changes to defaults, but some FATES plant hydraulics history variable names were changed in the corresponding FATES tag.
+
+Changes to tests or testing: An update was made to the fates-hydro tests to use updated history variable names.
+
+
+Testing summary:
+----------------
+
+ d) regular (regular tests on normal machines if CTSM source is modified)
+
+ cheyenne ---- /glade/scratch/rgknox/tests_0220-124230ch (OK) expected FATES baseline DIFFS
+ izumi ------- /scratch/cluster/rgknox/tests_0220-125238iz (OK)
+
+ fates tests: fates test suite was run, (OK)
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: All results are B4B with base (dev076), except for two conditions: 1) FATES output that uses the soil dimension has NLCOMP and DIFFS due to the dimension change, and 2) FATES hydro simulations have different results due to bug fixes
+
+
+Other details
+-------------
+
+List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): fates
+
+Pull Requests that document the changes (include PR ids):
+https://github.com/ESCOMP/CTSM/pull/1592
+https://github.com/NGEET/fates/pull/766
+
+===============================================================
+===============================================================
+Tag name: ctsm5.1.dev076
+Originator(s): negins (Negin Sobhani,UCAR/TSS,303-497-1224)
+Date: Fri Feb 18 13:25:19 MST 2022
+One-line Summary: Update subset_data.py and move it into the CTSM python package
+
+Purpose and description of changes
+----------------------------------
+The purpose of this PR was to update subset_data.py and other relevant
+python scripts and move them into the CTSM python package, so that other
+python code (such as fsurdat_modifier) can re-use/import their capabilities.
+
+This updated code now adds the capability to create user-mods for a generic
+singlepoint case.
+
+The subset_data.py script (invoked via the top-level subset_data wrapper) extracts the
+surface dataset, domain file, landuse file, and DATM files at a single point or over a
+region from the available global datasets. It then modifies the surface dataset based on
+the user options (command-line arguments), for example dom-pft or zeroing out
+non-vegetated types.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed or introduced
+------------------------
+[Remove any lines that don't apply. Remove entire section if nothing applies.]
+
+Issues fixed (include CTSM Issue #):
+ Fixes CTSM/#935 -- Make surface dataset and landuse consistent for singlept
+ Fixes CTSM/#1436 -- Running subset_data from any directory
+ Fixes CTSM/#1437 -- issue with modify_singlept_site_neon for finding latest file
+ Fixes CTSM/#1594 -- try/except for python download neon data
+ Fixes CTSM/#1606 -- NEON tools not handling crop weights correctly.
+ Part of development needed for #1490 -- generic single point simulation.
+ Partially Fixes CTSM/#1622 -- check if file exists and abort if not found + --overwrite
+ Fixes it for subset_data
+ Partially Fixes CTSM/#1441 -- Move critical toolchain script out of tools/contrib
+ Fixes it for subset_data
+
+CIME Issues fixed (include issue #): N/A
+
+Known bugs introduced in this tag (include issue #): N/A
+
+Known bugs found since the previous tag (include issue #): N/A
+
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+ Added python/ctsm/test/test_unit_args_utils.py (11 unit tests)
+ Added python/ctsm/test/test_unit_singlept_data.py (10 unit tests)
+ Added python/ctsm/test/test_unit_singlept_data_surfdata.py (24 unit tests)
+ Modified tests in python/ctsm/test/test_unit_utils.py
+ Modified tests in python/ctsm/test/test_unit_modify_fsurdat.py
+ Added 10 new tests in tests/tools/input_tests_master thanks to @ekluzek
+ Added test/tools/tests_pretag_nompi_neon
+
+Testing summary:
+----------------
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ clm_pymods test suite on cheyenne - PASS
+ pylint -- PASS (10.00/10) (cheyenne + python/3.7.9)
+ Python unit tests -- PASS (cheyenne)
+ Python system tests -- PASS (cheyenne)
+
+ tools-tests (test/tools) (if tools have been changed):
+
+ cheyenne - OK (tests_pretag_nompi_neon + subset_data tests)
+
+Answer changes
+--------------
+Changes answers relative to baseline: NO
+
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+https://github.com/ESCOMP/CTSM/pull/1461
+
+===============================================================
+===============================================================
+Tag name: ctsm5.1.dev075
+Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326)
+Date: Wed Feb 16 21:35:40 MST 2022
+One-line Summary: Small answer changes: urban ventilation, fire-emission, irrigate off when not crop, fix two SSP ndep files
+
+Purpose and description of changes
+----------------------------------
+
+The effects of ventilation (exchange of building air with canopy air) are accounted for in the
+energy budget inside the building. The effects on urban canopy air are not. This remedies that.
+
+This fixes an issue with how coefficients are applied to individual fire components. Allow units
+to be in molecules/m2/sec as well as kg/m2/sec.
+
+SSP3-7.0 Nitrogen-deposition file update. Fix SSP2-4.5 Nitrogen-deposition file. Turn irrigation
+off by default except for future scenarios.
+
+Add new external ccs_config_cesm which has the CESM cime configuration files.
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[x] clm5_1
+
+[x] clm5_0
+
+[x] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed or introduced
+------------------------
+Issues fixed (include CTSM Issue #):
+ Fixes #1526 -- Switch FATES methane test for methane off
+ Fixes #509 --- irrigate in 1850 is off for runs with use_crop but on for those without
+ Fixes #1578 -- Need to replace SSP370 ndep file with new one
+ Fixes #1631 -- ndep file for SSP2-4.5 is incorrect
+
+Known bugs introduced in this tag (include issue #):
+ #1653 -- Use secsphr in UrbBuildTempOleson2015Mod.F90
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+ Using older restart files will start the ventilation flux at zero.
+ Default fsurdat selection can now also match irrigate=.true. surface datasets.
+
+Changes made to namelist defaults (e.g., changed parameter values):
+ irrigate is now default off, except for future scenarios
+
+Changes to the datasets (e.g., parameter, surface or initial files):
+ New Nitrogen-deposition file for SSP3-7.0
+ Correct Nitrogen-deposition file for SSP2-4.5
+
+Notes of particular relevance for developers:
+---------------------------------------------
+
+Changes to tests or testing:
+ FATES methane on test switched to methane off, since methane on is the default
+
+Testing summary: regular
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ cheyenne - PASS (345 tests are different than baseline)
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ cheyenne - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ cheyenne ---- PASS
+ izumi ------- OK
+
+If the tag used for baseline comparisons was NOT the previous tag, note that here: previous
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: Yes
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: clm5_0/clm5_1 urban ventilation flux,
+ irrigate off now, SSP3-7.0/SSP2-4.5 new ndep file, fire-emissions different
+ - what platforms/compilers: All
+ - nature of change: Similar climate
+
+ If this tag changes climate describe the run(s) done to evaluate the new
+ climate (put details of the simulations in the experiment database)
+ - casename: oleson/ctsm51_ctsm51d61_1deg_GSWP3V1_CON_VENT_2000
+
+ URL for LMWG diagnostics output used to validate new climate:
+ https://webext.cgd.ucar.edu/I2000/ctsm51_ctsm51d61_1deg_GSWP3V1_CON_VENT_2000/lnd/ctsm51_ctsm51d61_1deg_GSWP3V1_CON_VENT_2000.1991_2010-ctsm51_ctsm51d61_1deg_GSWP3V1_CON_2000.1991_2010/set2/set2.html
+
+
+Other details
+-------------
+
+List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
+ cism to cismwrap_2_1_95
+ cime to cime6.0.13
+ cmeps to cmeps0.13.47
+ cdeps to cdeps0.12.35
+ cpl7 to cpl7.0.12
+ pio to pio2_5_5
+
+Pull Requests that document the changes (include PR ids):
+(https://github.com/ESCOMP/ctsm/pull)
+ #1535 -- Ventilation flux to urban canyon
+ #1532 -- Fire emissions bug fix
+
+===============================================================
+===============================================================
Tag name: ctsm5.1.dev074
Originator(s): slevis (Samuel Levis,SLevis Consulting,303-665-1310)
Date: Wed Feb 2 00:44:27 MST 2022
diff --git a/doc/ChangeSum b/doc/ChangeSum
index 93e87f9495..a7cc71ff1f 100644
--- a/doc/ChangeSum
+++ b/doc/ChangeSum
@@ -1,5 +1,14 @@
Tag Who Date Summary
============================================================================================================================
+ ctsm5.1.dev083 multiple 03/08/2022 Implement PCT_URBAN_MAX to minimize dynamic urban memory
+ ctsm5.1.dev082 slevis 02/28/2022 Replace dom_nat_pft with dom_plant to enable crop in fsurdat_modifier tool
+ ctsm5.1.dev081 swensosc 02/24/2022 Do not subtract irrigation from QRUNOFF diagnostic
+ ctsm5.1.dev080 sacks 02/24/2022 Use avg days per year when converting param units
+ ctsm5.1.dev079 sacks 02/24/2022 Changes to CropPhenology timing
+ ctsm5.1.dev078 sacks 02/24/2022 Rework single-point testing
+ ctsm5.1.dev077 rgknox 02/22/2022 Updates to the FATES API, including removal of patch dimensions and use of soil instead of ground layers in FATES history
+ ctsm5.1.dev076 negins 02/18/2022 Update subset_data.py and move it into the CTSM python package
+ ctsm5.1.dev075 erik 02/16/2022 Small answer changes: urban ventilation, fire-emission, irrigate off when not crop, fix two SSP ndep files
ctsm5.1.dev074 slevis 02/02/2022 Introduce vert. resolved MIMICS as new method to solve below ground decomp.
ctsm5.1.dev073 sacks 01/25/2022 Some fixes for Gregorian calendar
ctsm5.1.dev072 negins 01/17/2022 mksurfdat toolchain part 1: gen_mksurf_namelist
diff --git a/python/ctsm/args_utils.py b/python/ctsm/args_utils.py
new file mode 100644
index 0000000000..d944227157
--- /dev/null
+++ b/python/ctsm/args_utils.py
@@ -0,0 +1,53 @@
+"""
+General-purpose utilities for handling command-line
+arguments and flags in ctsm python codes, including
+argument types for command-line error handling.
+"""
+
+import logging
+import argparse
+
+from ctsm.config_utils import lon_range_0_to_360
+
+logger = logging.getLogger(__name__)
+
+def plat_type(plat):
+ """
+    Function to define lat type for the parser
+    and raise error if latitude is not between -90 and 90.
+
+ Args:
+ plat(str): latitude
+ Raises:
+ Error (ArgumentTypeError): when plat (latitude) is not between -90 and 90.
+ Returns:
+ plat_out (float): latitude in float
+ """
+ plat_out = float(plat)
+ if plat_out < -90 or plat_out > 90:
+ raise argparse.ArgumentTypeError(
+ "ERROR: Latitude should be between -90 and 90."
+ )
+ return plat_out
+
+def plon_type(plon):
+ """
+ Function to define lon type for the parser and
+ convert negative longitudes to 0-360 and
+ raise error if lon is not between -180 and 360.
+
+ Args:
+ plon (str): longitude
+ Raises:
+        Error (ArgumentTypeError): when longitude is < -180 or > 360.
+ Returns:
+ plon_out (float): converted longitude between 0 and 360
+ """
+ plon_float = float(plon)
+ if plon_float < -180 or plon_float > 360:
+ raise argparse.ArgumentTypeError(
+ "ERROR: Longitude should be between 0 and 360 or -180 and 180."
+ )
+ plon_out = lon_range_0_to_360(plon_float)
+ return plon_out
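A minimal usage sketch of the two validators above (values are illustrative):

    from ctsm.args_utils import plat_type, plon_type

    plat_type("40.0")     # -> 40.0
    plon_type("-105.25")  # -> 254.75, via lon_range_0_to_360
    plat_type("95")       # raises argparse.ArgumentTypeError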
diff --git a/python/ctsm/config_utils.py b/python/ctsm/config_utils.py
new file mode 100644
index 0000000000..0ece3a180a
--- /dev/null
+++ b/python/ctsm/config_utils.py
@@ -0,0 +1,166 @@
+"""
+General-purpose utilities and functions for handling command-line
+config files in ctsm python codes.
+"""
+
+import logging
+import configparser
+
+from ctsm.utils import abort
+
+logger = logging.getLogger(__name__)
+
+# This string is used in the out-of-the-box ctsm.cfg and modify.cfg files
+# to denote a value that needs to be filled in
+_CONFIG_PLACEHOLDER = "FILL_THIS_IN"
+# This string is used in the out-of-the-box ctsm.cfg and modify.cfg files
+# to denote a value that can be filled in, but doesn't absolutely need to be
+_CONFIG_UNSET = "UNSET"
+
+
+def lon_range_0_to_360(lon_in):
+ """
+ Description
+ -----------
+ Restrict longitude to 0 to 360 when given as -180 to 180.
+ """
+ if -180 <= lon_in < 0:
+ lon_out = lon_in % 360
+ logger.info(
+ "Resetting longitude from %s to %s to keep in the range " " 0 to 360",
+ str(lon_in),
+ str(lon_out),
+ )
+ elif 0 <= lon_in <= 360 or lon_in is None:
+ lon_out = lon_in
+ else:
+ errmsg = "lon_in needs to be in the range 0 to 360"
+ abort(errmsg)
+
+ return lon_out
+
+
+def get_config_value(
+ config,
+ section,
+ item,
+ file_path,
+ allowed_values=None,
+ default=None,
+ is_list=False,
+ convert_to_type=None,
+ can_be_unset=False,
+):
+ """Get a given item from a given section of the config object
+ Give a helpful error message if we can't find the given section or item
+ Note that the file_path argument is only used for the sake of the error message
+ If allowed_values is present, it should be a list of strings giving allowed values
+ The function _handle_config_value determines what to do if we read:
+ - a list or
+ - a str that needs to be converted to int / float / bool
+ - _CONFIG_UNSET: anything with the value "UNSET" will become "None"
+ """
+ try:
+ val = config.get(section, item)
+ except configparser.NoSectionError:
+ abort(
+ "ERROR: Config file {} must contain section '{}'".format(file_path, section)
+ )
+ except configparser.NoOptionError:
+ abort(
+ "ERROR: Config file {} must contain item '{}' in section '{}'".format(
+ file_path, item, section
+ )
+ )
+
+ if val == _CONFIG_PLACEHOLDER:
+ abort(
+ "Error: {} needs to be specified in config file {}".format(item, file_path)
+ )
+
+ val = _handle_config_value(
+ var=val,
+ default=default,
+ item=item,
+ is_list=is_list,
+ convert_to_type=convert_to_type,
+ can_be_unset=can_be_unset,
+ allowed_values=allowed_values,
+ )
+ return val
+
+
+def _handle_config_value(
+ var, default, item, is_list, convert_to_type, can_be_unset, allowed_values
+):
+ """
+ Description
+ -----------
+ Assign the default value or the user-specified one to var.
+    Convert from the default type (str) to the requested type (int, float, or bool).
+
+ If is_list is True, then default should be a list
+ """
+ if var == _CONFIG_UNSET:
+ if can_be_unset:
+ return default # default may be None
+ abort("Must set a value for .cfg file variable: {}".format(item))
+
+ # convert string to list of strings; if there is just one element,
+ # we will get a list of size one, which we will convert back to a
+ # scalar later if needed
+ var = var.split()
+
+ if convert_to_type is bool:
+ try:
+ var = [_convert_to_bool(v) for v in var]
+ except ValueError:
+ abort("Non-boolean value found for .cfg file variable: {}".format(item))
+ elif convert_to_type is not None:
+ try:
+ var = [convert_to_type(v) for v in var]
+ except ValueError:
+ abort("Wrong type for .cfg file variable: {}".format(item))
+
+ if allowed_values is not None:
+ for val in var:
+ if val not in allowed_values:
+ print("val = ", val, " in var not in allowed_values")
+ errmsg = (
+ "{} is not an allowed value for {} in .cfg file. "
+ "Check allowed_values".format(val, item)
+ )
+ abort(errmsg)
+
+ if not is_list:
+ if len(var) > 1:
+ abort("More than 1 element found for .cfg file variable: {}".format(item))
+ var = var[0]
+
+ return var
+
+
+def _convert_to_bool(var):
+ """
+ Function for converting different forms of
+ boolean strings to boolean value.
+
+ Args:
+ var (str): String bool input
+
+ Raises:
+ if the argument is not an acceptable boolean string
+ (such as yes or no ; true or false ; y or n ; t or f ; 0 or 1).
+ ValueError: The string should be one of the mentioned values.
+
+ Returns:
+ var_out (bool): Boolean value corresponding to the input.
+ """
+ if var.lower() in ("yes", "true", "t", "y", "1", "on"):
+ var_out = True
+ elif var.lower() in ("no", "false", "f", "n", "0", "off"):
+ var_out = False
+ else:
+ raise ValueError("Boolean value expected. [true or false] or [y or n]")
+
+ return var_out
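A short usage sketch of get_config_value (section and item names are illustrative):

    import configparser
    from ctsm.config_utils import get_config_value

    config = configparser.ConfigParser()
    config.read_string("[modify_input]\ndom_plant = 15\nlnd_lat_1 = UNSET\n")

    get_config_value(config, "modify_input", "dom_plant", "example.cfg",
                     convert_to_type=int)                       # -> 15
    get_config_value(config, "modify_input", "lnd_lat_1", "example.cfg",
                     convert_to_type=float, can_be_unset=True)  # -> None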
diff --git a/python/ctsm/ctsm_logging.py b/python/ctsm/ctsm_logging.py
index 7d63b9b463..ff51c6d8f2 100644
--- a/python/ctsm/ctsm_logging.py
+++ b/python/ctsm/ctsm_logging.py
@@ -31,6 +31,7 @@
logger = logging.getLogger(__name__)
+
def setup_logging_pre_config():
"""Setup logging for a script / application
@@ -41,12 +42,14 @@ def setup_logging_pre_config():
"""
setup_logging(level=logging.WARNING)
+
def setup_logging_for_tests(enable_critical=False):
"""Setup logging as appropriate for unit tests"""
setup_logging(level=logging.CRITICAL)
if not enable_critical:
logging.disable(logging.CRITICAL)
+
def setup_logging(level=logging.WARNING):
"""Setup logging for a script / application
@@ -54,18 +57,24 @@ def setup_logging(level=logging.WARNING):
do NOT intend to allow the user to control logging preferences via command-line
arguments, so that all of the final logging options are set here.
"""
- logging.basicConfig(format='%(levelname)s: %(message)s', level=level)
+ logging.basicConfig(format="%(levelname)s: %(message)s", level=level)
+
def add_logging_args(parser):
"""Add common logging-related options to the argument parser"""
logging_level = parser.add_mutually_exclusive_group()
- logging_level.add_argument('-v', '--verbose', action='store_true',
- help='Output extra logging info')
+ logging_level.add_argument(
+ "-v", "--verbose", action="store_true", help="Output extra logging info"
+ )
+
+ logging_level.add_argument(
+ "--debug",
+ action="store_true",
+ help="Output even more logging info for debugging",
+ )
- logging_level.add_argument('--debug', action='store_true',
- help='Output even more logging info for debugging')
def process_logging_args(args):
"""Configure logging based on the logging-related args added by add_logging_args"""
@@ -77,3 +86,13 @@ def process_logging_args(args):
root_logger.setLevel(logging.INFO)
else:
root_logger.setLevel(logging.WARNING)
+
+
+def output_to_file(file_path, message, log_to_logger=False):
+ """
+ Helper function to write a message to a log file (and optionally to the logger).
+ """
+ with open(file_path, "a") as log_file:
+ log_file.write(message)
+ if log_to_logger:
+ logger.info(message)
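+# Illustrative call of output_to_file (path and message are hypothetical):
+# output_to_file("subset.log", "Done subsetting.\n", log_to_logger=True)
+# appends the message to subset.log and also emits it at INFO level.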
diff --git a/python/ctsm/download_utils.py b/python/ctsm/download_utils.py
new file mode 100644
index 0000000000..c6b5cee3ec
--- /dev/null
+++ b/python/ctsm/download_utils.py
@@ -0,0 +1,47 @@
+"""General-purpose utility functions"""
+
+import logging
+
+import requests
+
+from ctsm.utils import abort
+
+logger = logging.getLogger(__name__)
+
+
+def download_file(url, fname):
+ """
+ Function to download a file.
+ Args:
+ url (str):
+ url of the file for downloading
+ fname (str) :
+ file name to save the downloaded file.
+
+ Raises:
+ Error :
+ When the file is not available on the server (status_code:404)
+ Error:
+ When download fails for any reason.
+ """
+ try:
+ response = requests.get(url)
+
+ # pylint: disable=broad-except
+ except Exception as err:
+ logger.warning("The server could not fulfill the request.")
+ logger.warning("Something went wrong in downloading: %s", fname)
+ err_msg = "Couldn't download file " + fname + "-- Error code:" + err
+ abort(err_msg)
+
+ # -- check the download status code before writing the file, so a
+ # -- failed request does not leave a bogus file on disk
+ if response.status_code == 200:
+ with open(fname, "wb") as this_f:
+ this_f.write(response.content)
+ logger.info("Download finished successfully for : %s", fname)
+
+ elif response.status_code == 404:
+ logger.warning("This file is not available on the server: %s", fname)
+ err_msg = "Couldn't download file " + fname + " -- Error code: 404"
+ abort(err_msg)
+
+ else:
+ logger.warning("Something went wrong in downloading: %s", fname)
+ err_msg = ("Couldn't download file " + fname
+ + " -- Error code: " + str(response.status_code))
+ abort(err_msg)
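+# Illustrative call of download_file (URL and file name are hypothetical):
+# download_file("https://example.com/surfdata.nc", "surfdata.nc")
+# saves the response body to surfdata.nc, or aborts with an error message
+# on failure.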
diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py
index 2f221a4bfb..65648a2fbf 100644
--- a/python/ctsm/git_utils.py
+++ b/python/ctsm/git_utils.py
@@ -3,19 +3,65 @@
import logging
import subprocess
+from ctsm.path_utils import path_to_ctsm_root
+
logger = logging.getLogger(__name__)
-def tag_describe():
+def get_ctsm_git_short_hash():
+ """
+ Returns Git short SHA for the CTSM repository.
+
+ Args:
+
+ Raises:
+
+ Returns:
+ sha (str) : git short hash for ctsm repository
+ """
+ sha = (
+ subprocess.check_output(
+ ["git", "-C", path_to_ctsm_root(), "rev-parse", "--short", "HEAD"]
+ )
+ .strip()
+ .decode()
+ )
+ return sha
+
+
+def get_ctsm_git_long_hash():
+ """
+ Returns Git long SHA for the CTSM repository.
+
+ Args:
+
+ Raises:
+
+ Returns:
+ sha (str) : git long hash for ctsm repository
+ """
+ sha = (
+ subprocess.check_output(["git", "-C", path_to_ctsm_root(), "rev-parse", "HEAD"])
+ .strip()
+ .decode()
+ )
+ return sha
+
+
+def get_ctsm_git_describe():
"""
- Function for giving the recent tag of the git repo
+ Function for giving the recent tag of the CTSM repository
Args:
Raises:
Returns:
- label.decode (str) : ouput of running 'git describe' in shell
+ label (str) : output of running 'git describe' for the CTSM repository
"""
- label = subprocess.check_output(["git", "describe"]).strip()
- return label.decode()
+ label = (
+ subprocess.check_output(["git", "-C", path_to_ctsm_root(), "describe"])
+ .strip()
+ .decode()
+ )
+ return label
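+# Illustrative output of get_ctsm_git_describe (tag and hash are hypothetical):
+# get_ctsm_git_describe() -> "ctsm5.1.dev070-12-gabc1234"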
diff --git a/python/ctsm/lilac_make_runtime_inputs.py b/python/ctsm/lilac_make_runtime_inputs.py
index 567b79d1a1..1d156077d9 100644
--- a/python/ctsm/lilac_make_runtime_inputs.py
+++ b/python/ctsm/lilac_make_runtime_inputs.py
@@ -11,7 +11,8 @@
from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args
from ctsm.path_utils import path_to_ctsm_root
-from ctsm.utils import abort, get_config_value
+from ctsm.utils import abort
+from ctsm.config_utils import get_config_value
logger = logging.getLogger(__name__)
diff --git a/python/ctsm/machine.py b/python/ctsm/machine.py
index 36e5c61788..607e0b43af 100644
--- a/python/ctsm/machine.py
+++ b/python/ctsm/machine.py
@@ -7,6 +7,11 @@
from ctsm.joblauncher.job_launcher_factory import \
create_job_launcher, JOB_LAUNCHER_NOBATCH
+# Value of create_test_queue for which we don't actually add a '--queue' option to
+# create_test, but instead leave that value unspecified, allowing CIME to pick an
+# appropriate queue for each test using its standard mechanisms.
+CREATE_TEST_QUEUE_UNSPECIFIED = "unspecified"
+
logger = logging.getLogger(__name__)
# TODO(wjs, 2018-08-31) Turn this into a real class, with getter methods.
@@ -28,6 +33,7 @@
'baseline_dir', # str
'account', # str or None
'create_test_retry', # int
+ 'create_test_queue', # str
'job_launcher']) # subclass of JobLauncherBase
def create_machine(machine_name, defaults, job_launcher_type=None,
@@ -80,6 +86,7 @@ def create_machine(machine_name, defaults, job_launcher_type=None,
mach_defaults = defaults.get(machine_name)
baseline_dir = None
create_test_retry = 0
+ create_test_queue = CREATE_TEST_QUEUE_UNSPECIFIED
if mach_defaults is not None:
if job_launcher_type is None:
job_launcher_type = mach_defaults.job_launcher_type
@@ -95,10 +102,12 @@ def create_machine(machine_name, defaults, job_launcher_type=None,
# generation and comparison, or making a link in some temporary location that
# points to the standard baselines).
baseline_dir = mach_defaults.baseline_dir
- # We also don't provide a way to override the default create_test_retry in the
- # machine object: this will always give the default value for this machine, and
- # other mechanisms will be given for overriding this in a particular case.
+ # We also don't provide a way to override the default create_test_retry or
+ # create_test_queue in the machine object: these will always give the default
+ # value for this machine, and other mechanisms will be given for overriding these
+ # in a particular case.
create_test_retry = mach_defaults.create_test_retry
+ create_test_queue = mach_defaults.create_test_queue
if account is None and mach_defaults.account_required and not allow_missing_entries:
raise RuntimeError("Could not find an account code")
else:
@@ -149,6 +158,7 @@ def create_machine(machine_name, defaults, job_launcher_type=None,
baseline_dir=baseline_dir,
account=account,
create_test_retry=create_test_retry,
+ create_test_queue=create_test_queue,
job_launcher=job_launcher)
def get_possibly_overridden_mach_value(machine, varname, value=None):
diff --git a/python/ctsm/machine_defaults.py b/python/ctsm/machine_defaults.py
index c6b624b885..6b387741d5 100644
--- a/python/ctsm/machine_defaults.py
+++ b/python/ctsm/machine_defaults.py
@@ -6,6 +6,7 @@
import os
from ctsm.joblauncher.job_launcher_factory import \
JOB_LAUNCHER_QSUB
+from ctsm.machine import CREATE_TEST_QUEUE_UNSPECIFIED
from ctsm.machine_utils import get_user
MachineDefaults = namedtuple('MachineDefaults', ['job_launcher_type',
@@ -13,6 +14,7 @@
'baseline_dir',
'account_required',
'create_test_retry',
+ 'create_test_queue',
'job_launcher_defaults'])
# job_launcher_type: one of the JOB_LAUNCHERs defined in job_launcher_factory
# scratch_dir: str
@@ -23,6 +25,10 @@
# for the non-default job launcher for this machine, in case the user chooses a
# non-default launcher.)
# create_test_retry: int: Default number of times to retry a create_test job on this machine
+# create_test_queue: str: Default queue to use for create_test; if this is
+# CREATE_TEST_QUEUE_UNSPECIFIED, then we won't add a '--queue' option to create_test,
+# instead leaving that value unspecified, allowing CIME to pick an appropriate queue
+# for each test using its standard mechanisms.
# account_required: bool: whether an account number is required on this machine (not
# really a default, but used for error-checking)
@@ -43,6 +49,10 @@
baseline_dir=os.path.join(os.path.sep, 'glade', 'p', 'cgd', 'tss', 'ctsm_baselines'),
account_required=True,
create_test_retry=0,
+ # NOTE(wjs, 2022-02-23) By default, use the regular queue, even for
+ # single-processor jobs. This is because the share queue has been really flaky,
+ # with lots of job failures or slow-running jobs.
+ create_test_queue='regular',
job_launcher_defaults={
JOB_LAUNCHER_QSUB: QsubDefaults(
queue='regular',
@@ -60,6 +70,7 @@
baseline_dir=os.path.join(os.path.sep, 'fs', 'cgd', 'csm', 'ccsm_baselines'),
account_required=False,
create_test_retry=0,
+ create_test_queue=CREATE_TEST_QUEUE_UNSPECIFIED,
job_launcher_defaults={
JOB_LAUNCHER_QSUB: QsubDefaults(
queue='medium',
@@ -75,6 +86,7 @@
# jobs on izumi experience a high frequency of failures, often at the very end of
# the job; so we'll automatically retry a failed job twice before giving up on it
create_test_retry=2,
+ create_test_queue=CREATE_TEST_QUEUE_UNSPECIFIED,
job_launcher_defaults={
JOB_LAUNCHER_QSUB: QsubDefaults(
queue='medium',
diff --git a/python/ctsm/modify_fsurdat/fsurdat_modifier.py b/python/ctsm/modify_fsurdat/fsurdat_modifier.py
index 76b2374a05..f9c0ac9682 100644
--- a/python/ctsm/modify_fsurdat/fsurdat_modifier.py
+++ b/python/ctsm/modify_fsurdat/fsurdat_modifier.py
@@ -8,7 +8,7 @@
import logging
import argparse
from configparser import ConfigParser
-from ctsm.utils import get_config_value
+from ctsm.config_utils import get_config_value
from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args
from ctsm.modify_fsurdat.modify_fsurdat import ModifyFsurdat
@@ -65,10 +65,15 @@ def fsurdat_modifier(cfg_path):
landmask_file = get_config_value(config=config, section=section,
item='landmask_file', file_path=cfg_path, can_be_unset=True)
+ # Create ModifyFsurdat object
+ modify_fsurdat = ModifyFsurdat.init_from_file(fsurdat_in,
+ lnd_lon_1, lnd_lon_2, lnd_lat_1, lnd_lat_2, landmask_file)
+
# not required: user may set these in the .cfg file
- dom_nat_pft = get_config_value(config=config, section=section,
- item='dom_nat_pft', file_path=cfg_path,
- allowed_values=range(15), # integers from 0 to 14
+ max_pft = int(max(modify_fsurdat.file.lsmpft))
+ dom_plant = get_config_value(config=config, section=section,
+ item='dom_plant', file_path=cfg_path,
+ allowed_values=range(max_pft + 1), # integers from 0 to max_pft
convert_to_type=int, can_be_unset=True)
lai = get_config_value(config=config, section=section, item='lai',
@@ -84,9 +89,10 @@ def fsurdat_modifier(cfg_path):
item='hgt_bot', file_path=cfg_path, is_list=True,
convert_to_type=float, can_be_unset=True)
+ max_soil_color = int(modify_fsurdat.file.mxsoil_color)
soil_color = get_config_value(config=config, section=section,
item='soil_color', file_path=cfg_path,
- allowed_values=range(1, 21), # integers from 1 to 20
+ allowed_values=range(1, max_soil_color + 1), # 1 to max_soil_color
convert_to_type=int, can_be_unset=True)
std_elev = get_config_value(config=config, section=section,
@@ -96,10 +102,6 @@ def fsurdat_modifier(cfg_path):
item='max_sat_area', file_path=cfg_path,
convert_to_type=float, can_be_unset=True)
- # Create ModifyFsurdat object
- modify_fsurdat = ModifyFsurdat.init_from_file(fsurdat_in,
- lnd_lon_1, lnd_lon_2, lnd_lat_1, lnd_lat_2, landmask_file)
-
# ------------------------------
# modify surface data properties
# ------------------------------
@@ -112,25 +114,33 @@ def fsurdat_modifier(cfg_path):
if idealized:
modify_fsurdat.set_idealized() # set 2D variables
# set 3D and 4D variables pertaining to natural vegetation
- modify_fsurdat.set_dom_nat_pft(dom_nat_pft=0, lai=[], sai=[],
- hgt_top=[], hgt_bot=[])
-
- if dom_nat_pft is not None: # overwrite "idealized" value
- modify_fsurdat.set_dom_nat_pft(dom_nat_pft=dom_nat_pft,
- lai=lai, sai=sai,
- hgt_top=hgt_top, hgt_bot=hgt_bot)
+ modify_fsurdat.set_dom_plant(dom_plant=0, lai=[], sai=[],
+ hgt_top=[], hgt_bot=[])
+ logger.info('idealized complete')
if max_sat_area is not None: # overwrite "idealized" value
modify_fsurdat.setvar_lev0('FMAX', max_sat_area)
+ logger.info('max_sat_area complete')
if std_elev is not None: # overwrite "idealized" value
modify_fsurdat.setvar_lev0('STD_ELEV', std_elev)
+ logger.info('std_elev complete')
if soil_color is not None: # overwrite "idealized" value
modify_fsurdat.setvar_lev0('SOIL_COLOR', soil_color)
+ logger.info('soil_color complete')
if zero_nonveg:
modify_fsurdat.zero_nonveg()
+ logger.info('zero_nonveg complete')
+
+ # The set_dom_plant call follows zero_nonveg because it modifies PCT_NATVEG
+ # and PCT_CROP in the user-defined rectangle
+ if dom_plant is not None:
+ modify_fsurdat.set_dom_plant(dom_plant=dom_plant,
+ lai=lai, sai=sai,
+ hgt_top=hgt_top, hgt_bot=hgt_bot)
+ logger.info('dom_plant complete')
# ----------------------------------------------
# Output the now modified CTSM surface data file
diff --git a/python/ctsm/modify_fsurdat/modify_fsurdat.py b/python/ctsm/modify_fsurdat/modify_fsurdat.py
index bf1a5e8c9b..10803875c7 100644
--- a/python/ctsm/modify_fsurdat/modify_fsurdat.py
+++ b/python/ctsm/modify_fsurdat/modify_fsurdat.py
@@ -11,7 +11,9 @@
import numpy as np
import xarray as xr
-from ctsm.utils import abort, get_ctsm_git_sha, update_metadata, lon_range_0_to_360
+from ctsm.git_utils import get_ctsm_git_short_hash
+from ctsm.utils import abort, update_metadata
+from ctsm.config_utils import lon_range_0_to_360
logger = logging.getLogger(__name__)
@@ -25,7 +27,7 @@ def __init__(self, my_data, lon_1, lon_2, lat_1, lat_2, landmask_file):
self.file = my_data
- self.not_rectangle = self._get_not_rectangle(
+ self.rectangle = self._get_rectangle(
lon_1=lon_1, lon_2=lon_2,
lat_1=lat_1, lat_2=lat_2,
longxy=self.file.LONGXY, latixy=self.file.LATIXY)
@@ -34,20 +36,21 @@ def __init__(self, my_data, lon_1, lon_2, lat_1, lat_2, landmask_file):
# overwrite self.not_rectangle with data from
# user-specified .nc file in the .cfg file
self._landmask_file = xr.open_dataset(landmask_file)
- rectangle = self._landmask_file.landmask
- self.not_rectangle = np.logical_not(rectangle)
+ self.rectangle = self._landmask_file.landmask
+
+ self.not_rectangle = np.logical_not(self.rectangle)
@classmethod
def init_from_file(cls, fsurdat_in, lon_1, lon_2, lat_1, lat_2, landmask_file):
"""Initialize a ModifyFsurdat object from file fsurdat_in"""
- logger.info( 'Opening fsurdat_in file to be modified: %s', fsurdat_in)
+ logger.info('Opening fsurdat_in file to be modified: %s', fsurdat_in)
my_file = xr.open_dataset(fsurdat_in)
return cls(my_file, lon_1, lon_2, lat_1, lat_2, landmask_file)
@staticmethod
- def _get_not_rectangle(lon_1, lon_2, lat_1, lat_2, longxy, latixy):
+ def _get_rectangle(lon_1, lon_2, lat_1, lat_2, longxy, latixy):
"""
Description
-----------
@@ -84,9 +87,8 @@ def _get_not_rectangle(lon_1, lon_2, lat_1, lat_2, longxy, latixy):
# union rectangles overlap
rectangle = np.logical_and(union_1, union_2)
- not_rectangle = np.logical_not(rectangle)
- return not_rectangle
+ return rectangle
def write_output(self, fsurdat_in, fsurdat_out):
@@ -108,7 +110,7 @@ def write_output(self, fsurdat_in, fsurdat_out):
title = 'Modified fsurdat file'
summary = 'Modified fsurdat file'
contact = 'N/A'
- data_script = os.path.abspath(__file__) + " -- " + get_ctsm_git_sha()
+ data_script = os.path.abspath(__file__) + " -- " + get_ctsm_git_short_hash()
description = 'Modified this file: ' + fsurdat_in
update_metadata(self.file, title=title, summary=summary,
contact=contact, data_script=data_script,
@@ -127,36 +129,52 @@ def write_output(self, fsurdat_in, fsurdat_out):
self.file.close()
- def set_dom_nat_pft(self, dom_nat_pft, lai, sai, hgt_top, hgt_bot):
+ def set_dom_plant(self, dom_plant, lai, sai, hgt_top, hgt_bot):
"""
Description
-----------
In rectangle selected by user (or default -90 to 90 and 0 to 360),
- replace fsurdat file's PCT_NAT_PFT with:
- - 100 for dom_nat_pft selected by user
- - 0 for all other non-crop PFTs
+ replace fsurdat file's PCT_NAT_PFT or PCT_CFT with:
+ - 100 for dom_plant selected by user
+ - 0 for all other PFTs/CFTs
If user has specified lai, sai, hgt_top, hgt_bot, replace these with
- values selected by the user for dom_nat_pft
+ values selected by the user for dom_plant
Arguments
---------
- dom_nat_pft:
- (int) User's entry of PFT to be set to 100% everywhere
+ dom_plant:
+ (int) User's entry of PFT/CFT to be set to 100% everywhere
lai:
- (float) User's entry of MONTHLY_LAI for their dom_nat_pft
+ (float) User's entry of MONTHLY_LAI for their dom_plant
sai:
- (float) User's entry of MONTHLY_SAI for their dom_nat_pft
+ (float) User's entry of MONTHLY_SAI for their dom_plant
hgt_top:
- (float) User's entry of MONTHLY_HEIGHT_TOP for their dom_nat_pft
+ (float) User's entry of MONTHLY_HEIGHT_TOP for their dom_plant
hgt_bot:
- (float) User's entry of MONTHLY_HEIGHT_BOT for their dom_nat_pft
+ (float) User's entry of MONTHLY_HEIGHT_BOT for their dom_plant
"""
- for pft in self.file.natpft:
- # initialize 3D variable; set outside the loop below
- self.setvar_lev1('PCT_NAT_PFT', val=0, lev1_dim=pft)
- # set 3D variable value for dom_nat_pft
- self.setvar_lev1('PCT_NAT_PFT', val=100, lev1_dim=dom_nat_pft)
+ # If dom_plant is a cft, add PCT_NATVEG to PCT_CROP in the rectangle
+ # and remove same from PCT_NATVEG, i.e. set PCT_NATVEG = 0.
+ if dom_plant > max(self.file.natpft): # dom_plant is a cft (crop)
+ self.file['PCT_CROP'] = \
+ self.file['PCT_CROP'] + \
+ self.file['PCT_NATVEG'].where(self.rectangle, other=0)
+ self.setvar_lev0('PCT_NATVEG', 0)
+
+ for cft in self.file.cft:
+ cft_local = cft - (max(self.file.natpft) + 1)
+ # initialize 3D variable; set outside the loop below
+ self.setvar_lev1('PCT_CFT', val=0, lev1_dim=cft_local)
+
+ # set 3D variable
+ self.setvar_lev1('PCT_CFT', val=100, lev1_dim=dom_plant-(max(self.file.natpft)+1))
+ else: # dom_plant is a pft (not a crop)
+ for pft in self.file.natpft:
+ # initialize 3D variable; set outside the loop below
+ self.setvar_lev1('PCT_NAT_PFT', val=0, lev1_dim=pft)
+ # set 3D variable value for dom_plant
+ self.setvar_lev1('PCT_NAT_PFT', val=100, lev1_dim=dom_plant)
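+ # Illustrative: with natpft indices 0-14, dom_plant = 17 maps to
+ # PCT_CFT index 17 - 15 = 2 in the crop branch above (values hypothetical).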
# dictionary of 4d variables to loop over
vars_4d = {'MONTHLY_LAI': lai,
@@ -165,26 +183,26 @@ def set_dom_nat_pft(self, dom_nat_pft, lai, sai, hgt_top, hgt_bot):
'MONTHLY_HEIGHT_BOT': hgt_bot}
for var, val in vars_4d.items():
if val is not None:
- self.set_lai_sai_hgts(dom_nat_pft=dom_nat_pft,
- var=var, val=val)
+ self.set_lai_sai_hgts(dom_plant=dom_plant, var=var, val=val)
- def set_lai_sai_hgts(self, dom_nat_pft, var, val):
+ def set_lai_sai_hgts(self, dom_plant, var, val):
"""
Description
-----------
If user has specified lai, sai, hgt_top, hgt_bot, replace these with
- values selected by the user for dom_nat_pft. Else do nothing.
+ values selected by the user for dom_plant. Else do nothing.
"""
- if dom_nat_pft == 0: # bare soil: var must equal 0
- val = [0] * 12
- if len(val) != 12:
- errmsg = 'Error: Variable should have exactly 12 ' \
- 'entries in the configure file: ' + var
+ months = int(max(self.file.time)) # number of months (usually 12)
+ if dom_plant == 0: # bare soil: var must equal 0
+ val = [0] * months
+ if len(val) != months:
+ errmsg = 'Error: Variable should have exactly ' + str(months) + \
+ ' entries in the configure file: ' + var
abort(errmsg)
for mon in self.file.time - 1: # loop over 12 months
- # set 4D variable to value for dom_nat_pft
- self.setvar_lev2(var, val[int(mon)], lev1_dim=dom_nat_pft,
+ # set 4D variable to value for dom_plant
+ self.setvar_lev2(var, val[int(mon)], lev1_dim=dom_plant,
lev2_dim=mon)
diff --git a/python/ctsm/run_sys_tests.py b/python/ctsm/run_sys_tests.py
index f45aa81927..a72ac59a98 100644
--- a/python/ctsm/run_sys_tests.py
+++ b/python/ctsm/run_sys_tests.py
@@ -1,6 +1,5 @@
"""Functions implementing run_sys_tests command"""
-from __future__ import print_function
import argparse
import logging
import os
@@ -13,7 +12,8 @@
from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args
from ctsm.machine_utils import get_machine_name
-from ctsm.machine import create_machine, get_possibly_overridden_mach_value
+from ctsm.machine import (create_machine, get_possibly_overridden_mach_value,
+ CREATE_TEST_QUEUE_UNSPECIFIED)
from ctsm.machine_defaults import MACHINE_DEFAULTS
from ctsm.os_utils import make_link
from ctsm.path_utils import path_to_ctsm_root
@@ -119,13 +119,15 @@ def run_sys_tests(machine, cime_path,
that is None, then the test suite will determine it automatically)
walltime (str): walltime to use for each test (if not provided, the test suite will
determine it automatically)
- queue (str): queue to use for each test (if not provided, the test suite will
- determine it automatically)
+ queue (str): queue to use for each test (if not provided, will use the default for
+ this machine based on the passed-in machine object; if that is unspecified, then
+ the test suite will determine it automatically)
retry (int): retry value to pass to create_test (if not provided, will use the default
for this machine)
extra_create_test_args (str): any extra arguments to create_test, as a single,
space-delimited string
testlist: list of strings giving test names to run
+
"""
num_provided_options = ((suite_name is not None) +
(testfile is not None) +
@@ -148,6 +150,20 @@ def run_sys_tests(machine, cime_path,
retry_final = get_possibly_overridden_mach_value(machine,
varname='create_test_retry',
value=retry)
+ # Note the distinction between a queue of None and a queue of
+ # CREATE_TEST_QUEUE_UNSPECIFIED in the following: If queue is None (meaning that the
+ # user hasn't specified a '--queue' argument to run_sys_tests), then we'll use the
+ # queue specified in the machine object; if queue is CREATE_TEST_QUEUE_UNSPECIFIED,
+ # then we'll force queue_final to be None, which means we won't add a '--queue'
+ # argument to create_test, regardless of what is specified in the machine object.
+ # (It's also possible for the machine object to specify a queue of
+ # CREATE_TEST_QUEUE_UNSPECIFIED, which means that we won't use a '--queue' argument to
+ # create_test unless the user specifies a '--queue' argument to run_sys_tests.)
+ queue_final = get_possibly_overridden_mach_value(machine,
+ varname='create_test_queue',
+ value=queue)
+ if queue_final == CREATE_TEST_QUEUE_UNSPECIFIED:
+ queue_final = None
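+ # Illustrative outcomes of the above resolution (queue names hypothetical):
+ # --queue not given, machine default 'regular' -> queue_final = 'regular'
+ # --queue not given, machine default unspecified -> queue_final = None
+ # --queue 'share', any machine default -> queue_final = 'share'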
if not skip_git_status:
_record_git_status(testroot, retry_final, dry_run)
@@ -159,7 +175,7 @@ def run_sys_tests(machine, cime_path,
baseline_root=baseline_root_final,
account=machine.account,
walltime=walltime,
- queue=queue,
+ queue=queue_final,
retry=retry_final,
rerun_existing_failures=rerun_existing_failures,
extra_create_test_args=extra_create_test_args)
@@ -309,7 +325,11 @@ def _commandline_args():
parser.add_argument('--queue',
help='Queue to which tests are submitted.\n'
- 'If not provided, uses machine default.')
+ 'The special value "{}" means do not add a --queue option to create_test,\n'
+ 'instead allowing CIME to pick an appropriate queue for each test\n'
+ 'using its standard mechanisms.\n'
+ 'Default for this machine: {}'.format(
+ CREATE_TEST_QUEUE_UNSPECIFIED, default_machine.create_test_queue))
parser.add_argument('--retry', type=int,
help='Argument to create_test: Number of times to retry failed tests.\n'
diff --git a/python/ctsm/site_and_regional/__init__.py b/python/ctsm/site_and_regional/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py
new file mode 100644
index 0000000000..d0567f68aa
--- /dev/null
+++ b/python/ctsm/site_and_regional/base_case.py
@@ -0,0 +1,241 @@
+"""
+This module includes the definition for a parent class for SinglePointCase
+and RegionalCase. The common functionalities of SinglePointCase and
+RegionalCase are defined in this class.
+"""
+# -- Import libraries
+
+# -- standard libraries
+import os.path
+import logging
+from collections import namedtuple
+
+from datetime import date
+from getpass import getuser
+
+# -- 3rd party libraries
+import numpy as np
+import xarray as xr
+
+# -- import local classes for this script
+from ctsm.utils import abort
+from ctsm.git_utils import get_ctsm_git_short_hash
+
+USRDAT_DIR = "CLM_USRDAT_DIR"
+logger = logging.getLogger(__name__)
+
+# named tuple for datm input/output files and folder names
+DatmFiles = namedtuple(
+ "DatmFiles",
+ "indir outdir fdomain_in dir_solar dir_prec dir_tpqw tag_solar tag_prec tag_tpqw name_solar "
+ "name_prec name_tpqw ",
+)
+
+
+class BaseCase:
+ """
+ Parent class to SinglePointCase and RegionalCase
+ ...
+ Attributes
+ ----------
+ create_domain : bool
+ flag for creating domain file
+ create_surfdata : bool
+ flag for creating surface dataset
+ create_landuse : bool
+ flag for creating landuse file
+ create_datm : bool
+ flag for creating DATM files
+ create_user_mods : bool
+ flag for creating a user_mods directory
+ overwrite : bool
+ flag for overwriting if the file already exists
+
+ Methods
+ -------
+ create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim)
+ create 1d coordinate variables to enable sel() method
+ update_metadata(nc_file)
+ Class method for adding some new attributes (such as date, username) and
+ removing the old attributes from the netcdf file.
+ write_to_file:
+ Writes text to a file, surrounding text with \n characters
+ write_to_netcdf:
+ write xarray dataset to netcdf
+ """
+
+ def __init__(
+ self,
+ create_domain,
+ create_surfdata,
+ create_landuse,
+ create_datm,
+ create_user_mods,
+ overwrite,
+ ):
+ """
+ Initializes BaseCase with the given arguments.
+
+ Parameters
+ ----------
+ create_domain : bool
+ Flag for creating a domain file for a region/single point
+ create_surfdata : bool
+ Flag for creating a surface dataset for a region/single point
+ create_landuse : bool
+ Flag for creating a landuse file for a region/single point
+ create_datm : bool
+ Flag for creating DATM files for a region/single point
+ create_user_mods : bool
+ Flag for creating user mods directories and files for running CTSM
+ overwrite : bool
+ flag for overwriting if the file already exists
+ """
+ self.create_domain = create_domain
+ self.create_surfdata = create_surfdata
+ self.create_landuse = create_landuse
+ self.create_datm = create_datm
+ self.create_user_mods = create_user_mods
+ self.overwrite = overwrite
+
+ def __str__(self):
+ """
+ Converts the attributes of the BaseCase to a string for printing.
+ """
+ return "{}\n{}".format(
+ str(self.__class__),
+ "\n".join(
+ (
+ "{} = {}".format(str(key), str(self.__dict__[key]))
+ for key in sorted(self.__dict__)
+ )
+ ),
+ )
+
+ @staticmethod
+ def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim):
+ """
+ Create 1d coordinate variables for a netcdf file to enable sel() method
+
+ Parameters
+ ----------
+ filename (str) : name of the netcdf file
+ lon_varname (str) : variable name that has 2d lon
+ lat_varname (str) : variable name that has 2d lat
+ x_dim (str) : dimension name in X -- lon
+ y_dim (str): dimension name in Y -- lat
+
+ Raises
+ ------
+ None
+
+ Returns
+ -------
+ f_out (xarray Dataset): Xarray Dataset with 1-d coords
+
+ """
+
+ if os.path.exists(filename):
+ logger.debug("Open file: %s", filename)
+
+ f_in = xr.open_dataset(filename)
+ else:
+ err_msg = "File not found : " + filename
+ abort(err_msg)
+
+ # create 1d coordinate variables to enable sel() method
+ lon0 = np.asarray(f_in[lon_varname][0, :])
+ lat0 = np.asarray(f_in[lat_varname][:, 0])
+ lon = xr.DataArray(lon0, name="lon", dims=x_dim, coords={x_dim: lon0})
+ lat = xr.DataArray(lat0, name="lat", dims=y_dim, coords={y_dim: lat0})
+
+ f_out = f_in.assign({"lon": lon, "lat": lat})
+
+ f_out = f_out.reset_coords([lon_varname, lat_varname])
+ f_in.close()
+ return f_out
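+ # Illustrative use of create_1d_coord (the file name is hypothetical):
+ # ds = BaseCase.create_1d_coord("surfdata.nc", "LONGXY", "LATIXY",
+ # "lsmlon", "lsmlat")
+ # ds.sel(lsmlon=300.0, lsmlat=45.0, method="nearest")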
+
+ @staticmethod
+ def update_metadata(nc_file):
+ """
+ Class method for adding some new attributes (such as date, username) and
+ removing the old attributes from the netcdf file.
+
+ Parameters
+ ----------
+ nc_file (xarray dataset) :
+ Xarray dataset of the netcdf file whose metadata we want to update.
+
+ Raises
+ ------
+ None
+
+ Returns
+ ------
+ None
+
+ """
+ # update attributes
+ today = date.today()
+ today_string = today.strftime("%Y-%m-%d")
+
+ # get git hash
+ sha = get_ctsm_git_short_hash()
+
+ nc_file.attrs["Created_on"] = today_string
+ nc_file.attrs["Created_by"] = getuser()
+ nc_file.attrs["Created_with"] = "./subset_data" + " -- " + sha
+
+ # delete unrelated attributes if they exist
+ del_attrs = [
+ "source_code",
+ "SVN_url",
+ "hostname",
+ "history",
+ "History_Log",
+ "Logname",
+ "Host",
+ "Version",
+ "Compiler_Optimized",
+ ]
+ attr_list = nc_file.attrs
+
+ for attr in del_attrs:
+ if attr in attr_list:
+ logger.debug("This attr should be deleted : %s", attr)
+ del nc_file.attrs[attr]
+
+ @staticmethod
+ def write_to_file(text, file_out):
+ """
+ Writes text to a file, surrounding text with \n characters
+ """
+ file_out.write("\n{}\n".format(text))
+
+ def write_to_netcdf(self, xr_ds, nc_fname):
+ """
+ Writes a netcdf file if
+ - the file does not exist.
+ or
+ - overwrite flag is chosen.
+
+ Args:
+ xr_ds : Xarray Dataset
+ The xarray dataset that we are writing out to a netcdf file.
+ nc_fname : str
+ Netcdf file name
+ Raises:
+ Error and aborts the code if the file exists and --overwrite is not used.
+ """
+ if not os.path.exists(nc_fname) or self.overwrite:
+ # mode 'w' overwrites file
+ xr_ds.to_netcdf(path=nc_fname, mode="w", format="NETCDF3_64BIT")
+ else:
+ err_msg = (
+ "File "
+ + nc_fname
+ + " already exists."
+ + "\n Either remove the file or use "
+ + "--overwrite to overwrite the existing files."
+ )
+ abort(err_msg)
diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py
new file mode 100644
index 0000000000..57bb8474f4
--- /dev/null
+++ b/python/ctsm/site_and_regional/regional_case.py
@@ -0,0 +1,228 @@
+"""
+This module includes the definition for the RegionalCase class.
+"""
+# -- Import libraries
+# -- Import Python Standard Libraries
+import logging
+import os
+
+# -- 3rd party libraries
+import numpy as np
+
+# -- import local classes for this script
+from ctsm.site_and_regional.base_case import BaseCase, USRDAT_DIR
+from ctsm.utils import add_tag_to_filename
+
+logger = logging.getLogger(__name__)
+
+
+class RegionalCase(BaseCase):
+ """
+ A class to encapsulate regional cases.
+
+ ...
+ Attributes
+ ----------
+ lat1 : float
+ first (southern) latitude of a region.
+ lat2 : float
+ second (northern) latitude of a region.
+ lon1 : float
+ first (western) longitude of a region.
+ lon2 : float
+ second (eastern) longitude of a region.
+ reg_name: str -- default = None
+ Region's name
+ create_domain : bool
+ flag for creating domain file
+ create_surfdata : bool
+ flag for creating surface dataset
+ create_landuse : bool
+ flag for creating landuse file
+ create_datm : bool
+ flag for creating DATM files
+ create_user_mods : bool
+ flag for creating user mods files and folders
+ overwrite : bool
+ flag for over-writing files if they already exist
+
+
+ Methods
+ -------
+ create_tag
+ Create a tag for this region which is either the
+ region's name or a combination of the bounds of this
+ region, lon1-lon2_lat1-lat2
+
+ create_domain_at_reg
+ Create domain file at this region
+
+ create_surfdata_at_reg
+ Create surface dataset at this region
+
+ create_landuse_at_reg
+ Create landuse file at this region
+
+ """
+
+ def __init__(
+ self,
+ lat1,
+ lat2,
+ lon1,
+ lon2,
+ reg_name,
+ create_domain,
+ create_surfdata,
+ create_landuse,
+ create_datm,
+ create_user_mods,
+ out_dir,
+ overwrite,
+ ):
+ """
+ Initializes RegionalCase with the given arguments.
+ """
+ super().__init__(
+ create_domain,
+ create_surfdata,
+ create_landuse,
+ create_datm,
+ create_user_mods,
+ overwrite,
+ )
+ self.lat1 = lat1
+ self.lat2 = lat2
+ self.lon1 = lon1
+ self.lon2 = lon2
+ self.reg_name = reg_name
+ self.out_dir = out_dir
+ self.create_tag()
+
+ def create_tag(self):
+ """
+ Create a tag for a region which is either the region name
+ or
+ the lon1-lon2_lat1-lat2 bounds if the region name does not exist.
+ """
+ if self.reg_name:
+ self.tag = self.reg_name
+ else:
+ self.tag = "{}-{}_{}-{}".format(
+ str(self.lon1), str(self.lon2), str(self.lat1), str(self.lat2)
+ )
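+ # Illustrative: lon1=300, lon2=330, lat1=-10, lat2=10 with no reg_name
+ # gives tag "300-330_-10-10" (values are hypothetical).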
+
+ def create_domain_at_reg(self, indir, file):
+ """
+ Create domain file for this RegionalCase class.
+ """
+
+ # specify files
+ fdomain_in = os.path.join(indir, file)
+ fdomain_out = add_tag_to_filename(fdomain_in, self.tag)
+ logger.info("fdomain_in: %s", fdomain_in)
+ logger.info("fdomain_out: %s", os.path.join(self.out_dir, fdomain_out))
+ logger.info("Creating domain file at region: %s", self.tag)
+
+ # create 1d coordinate variables to enable sel() method
+ f_in = self.create_1d_coord(fdomain_in, "xc", "yc", "ni", "nj")
+ lat = f_in["lat"]
+ lon = f_in["lon"]
+
+ # subset longitude and latitude arrays
+ xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0]
+ yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0]
+ f_out = f_in.isel(nj=yind, ni=xind)
+
+ # update attributes
+ self.update_metadata(f_out)
+ f_out.attrs["Created_from"] = fdomain_in
+
+ # mode 'w' overwrites file
+ wfile = os.path.join(self.out_dir, fdomain_out)
+ self.write_to_netcdf(f_out, wfile)
+ logger.info("Successfully created file (fdomain_out) %s", wfile)
+ f_in.close()
+ f_out.close()
+
+ def create_surfdata_at_reg(self, indir, file, user_mods_dir):
+ """
+ Create surface data file for this RegionalCase class.
+ """
+
+ logger.info("Creating surface dataset file at region: %s", self.tag)
+
+ # specify files
+ fsurf_in = os.path.join(indir, file)
+ fsurf_out = add_tag_to_filename(fsurf_in, self.tag)
+ logger.info("fsurf_in: %s", fsurf_in)
+ logger.info("fsurf_out: %s", os.path.join(self.out_dir, fsurf_out))
+
+ # create 1d coordinate variables to enable sel() method
+ f_in = self.create_1d_coord(fsurf_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
+ lat = f_in["lat"]
+ lon = f_in["lon"]
+
+ # subset longitude and latitude arrays
+ xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0]
+ yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0]
+ f_out = f_in.isel(lsmlat=yind, lsmlon=xind)
+
+ # update attributes
+ self.update_metadata(f_out)
+ f_out.attrs["Created_from"] = fsurf_in
+
+ # mode 'w' overwrites file
+ wfile = os.path.join(self.out_dir, fsurf_out)
+ self.write_to_netcdf(f_out, wfile)
+ logger.info("created file (fsurf_out) %s", wfile)
+ f_in.close()
+ f_out.close()
+
+ # write to user_nl_clm if specified
+ if self.create_user_mods:
+ with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm:
+ line = "fsurdat = '${}'".format(os.path.join(USRDAT_DIR, fsurf_out))
+ self.write_to_file(line, nl_clm)
+
+ def create_landuse_at_reg(self, indir, file, user_mods_dir):
+ """
+ Create land use data file for this RegionalCase class.
+ """
+
+ logger.info("Creating landuse file at region: %s", self.tag)
+
+ # specify files
+ fluse_in = os.path.join(indir, file)
+ fluse_out = add_tag_to_filename(fluse_in, self.tag)
+ logger.info("fluse_in: %s", fluse_in)
+ logger.info("fluse_out: %s", os.path.join(self.out_dir, fluse_out))
+
+ # create 1d coordinate variables to enable sel() method
+ f_in = self.create_1d_coord(fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
+ lat = f_in["lat"]
+ lon = f_in["lon"]
+
+ # subset longitude and latitude arrays
+ xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0]
+ yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0]
+ f_out = f_in.isel(lsmlat=yind, lsmlon=xind)
+
+ # update attributes
+ self.update_metadata(f_out)
+ f_out.attrs["Created_from"] = fluse_in
+
+ # mode 'w' overwrites file
+ wfile = os.path.join(self.out_dir, fluse_out)
+ self.write_to_netcdf(f_out, wfile)
+ logger.info("Successfully created file (fluse_out) %s", wfile)
+ f_in.close()
+ f_out.close()
+
+ if self.create_user_mods:
+ with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm:
+ # line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, fluse_out))
+ line = "flanduse_timeseries = '${}'".format(
+ os.path.join(USRDAT_DIR, fluse_out)
+ )
+ self.write_to_file(line, nl_clm)
diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py
new file mode 100644
index 0000000000..f290142272
--- /dev/null
+++ b/python/ctsm/site_and_regional/single_point_case.py
@@ -0,0 +1,700 @@
+"""
+This module includes the definition for SinglePointCase class.
+"""
+
+# -- Import libraries
+# -- Import Python Standard Libraries
+import logging
+import os
+import argparse
+
+# -- 3rd party libraries
+import numpy as np
+import xarray as xr
+
+# -- import local classes for this script
+from ctsm.site_and_regional.base_case import BaseCase, USRDAT_DIR, DatmFiles
+from ctsm.utils import add_tag_to_filename
+
+logger = logging.getLogger(__name__)
+
+NAT_PFT = 15
+MAX_PFT = 78
+
+# -- constants to represent months of year
+FIRST_MONTH = 1
+LAST_MONTH = 12
+
+
+class SinglePointCase(BaseCase):
+ """
+ A class to encapsulate everything for single point cases.
+
+ ...
+
+ Attributes
+ ----------
+ plat : float
+ latitude of the single point
+ plon : float
+ longitude of the single point
+ site_name: str -- default = None
+ Site name
+ create_domain : bool
+ flag for creating domain file
+ create_surfdata : bool
+ flag for creating surface dataset
+ create_landuse : bool
+ flag for creating landuse file
+ create_datm : bool
+ flag for creating DATM files
+ create_user_mods : bool
+ flag for creating user mods directories and files
+ dom_pft : list
+ dominant pft type(s) for this single point (None if not specified)
+ pct_pft : list
+ weight or percentage of each pft.
+ num_pft : int
+ total number of pfts for surface dataset (78 pft if crop, else 16 pft)
+ include_nonveg : bool
+ flag for including non-vegetation land units (they are zeroed out by default)
+ uni_snow : bool
+ flag for creating datasets using uniform snowpack
+ cap_saturation : bool
+ flag for capping saturated area fraction (FMAX) at zero
+ overwrite : bool
+ flag for over-writing files if they already exist
+
+ Methods
+ -------
+ create_tag
+ create a tag for single point which is the site name
+ or the "lon-lat" format if the site name does not exist.
+
+ create_domain_at_point
+ Create domain file at a single point.
+
+ create_landuse_at_point:
+ Create landuse file at a single point.
+
+ modify_surfdata_atpoint:
+ Modify surface dataset based on combination of user choices.
+
+ create_surfdata_at_point:
+ Create surface dataset at a single point.
+
+ create_datmdomain_at_point:
+ Create DATM domain file at a single point.
+
+ extract_datm_at:
+ Extract DATM for one file at a single point.
+
+ create_datm_at_point:
+ Extract all DATM data at a single point.
+ """
+
+ # pylint: disable=too-many-instance-attributes
+ # the ones we have are useful
+
+ def __init__(
+ self,
+ plat,
+ plon,
+ site_name,
+ create_domain,
+ create_surfdata,
+ create_landuse,
+ create_datm,
+ create_user_mods,
+ dom_pft,
+ pct_pft,
+ num_pft,
+ include_nonveg,
+ uni_snow,
+ cap_saturation,
+ out_dir,
+ overwrite,
+ ):
+ super().__init__(
+ create_domain,
+ create_surfdata,
+ create_landuse,
+ create_datm,
+ create_user_mods,
+ overwrite,
+ )
+ self.plat = plat
+ self.plon = plon
+ self.site_name = site_name
+ self.dom_pft = dom_pft
+ self.pct_pft = pct_pft
+ self.num_pft = num_pft
+ self.include_nonveg = include_nonveg
+ self.uni_snow = uni_snow
+ self.cap_saturation = cap_saturation
+ self.out_dir = out_dir
+
+ self.create_tag()
+ self.check_dom_pft()
+ self.check_nonveg()
+ self.check_pct_pft()
+
+ def create_tag(self):
+ """
+ Create a tag for single point which is the site name
+ or the "lon-lat" format if the site name does not exist.
+ """
+ if self.site_name:
+ self.tag = self.site_name
+ else:
+ self.tag = "{}_{}".format(str(self.plon), str(self.plat))
+
+ def check_dom_pft(self):
+ """
+ A function to sanity check values in dom_pft:
+
+ - Compare dom_pft (values if more than one) with num_pft:
+ i.e. If dom_pft is 18 without crop it fails.
+
+ - Check for mixed land-units:
+ If we have more than one dom_pft, they should be in the
+ same range.
+ e.g. If users specified multiple dom_pft, they should be
+ either in :
+ - 0 - NAT_PFT-1 range
+ or
+ - NAT_PFT - MAX_PFT range
+ - give an error : mixed land units not possible.
+
+ -------------
+ Raises:
+ Error (ArgumentTypeError):
+ If any dom_pft is bigger than MAX_PFT.
+ Error (ArgumentTypeError):
+ If any dom_pft is less than 0.
+ Error (ArgumentTypeError):
+ If mixed land units are chosen.
+ dom_pft values are both in range of (0 - NAT_PFT-1) and (NAT_PFT - MAX_PFT).
+
+
+ """
+
+ if self.dom_pft is None:
+ logger.warning(
+ "No dominant pft type is chosen. "
+ "If you want to choose a dominant pft type, please use --dompft flag."
+ )
+ else:
+ min_dom_pft = min(self.dom_pft)
+ max_dom_pft = max(self.dom_pft)
+
+ # -- check dom_pft values should be between 0-MAX_PFT
+ if min_dom_pft < 0 or max_dom_pft > MAX_PFT:
+ err_msg = "values for --dompft should be between 1 and 78."
+ raise argparse.ArgumentTypeError(err_msg)
+
+ # -- check dom_pft vs num_pft
+ if self.num_pft - 1 < max_dom_pft < MAX_PFT:
+ err_msg = "Please use --crop flag when --dompft is above 15."
+ raise argparse.ArgumentTypeError(err_msg)
+
+ # -- check if all dom_pft are in the same range:
+ if min_dom_pft < NAT_PFT <= max_dom_pft:
+ err_msg = """
+ \n
+ Subsetting using mixed land units is not possible.
+ Please make sure all --dompft values are in only
+ one of these ranges:
+ - 0-{} natural pfts
+ - {}-{} crop pfts (cfts)
+ """.format(
+ NAT_PFT - 1, NAT_PFT, MAX_PFT
+ )
+ raise argparse.ArgumentTypeError(err_msg)
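+ # Illustrative: --dompft 3 20 mixes the natural (0-14) and crop
+ # (15-78) ranges and triggers this error, while --dompft 3 5 is
+ # accepted (values are hypothetical).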
+
+ def check_nonveg(self):
+ """
+ A function to check at least one of the following arguments is given:
+ --include-nonveg
+ --dompft DOMPFT
+
+ Basically, this function raises an error when the user zeros out
+ non-veg land units (the default) without providing a dominant pft:
+
+ The user can run ./subset_data using:
+ ./subset_data point --dompft
+ ./subset_data point --include-nonveg
+ ./subset_data point --dompft --include-nonveg
+
+ But this will raise an error:
+ ./subset_data point
+
+ By default include_nonveg = False, which means that it zeros out the non-veg landunits.
+ """
+
+ if not self.include_nonveg:
+ if self.dom_pft is None:
+ err_msg = """
+ \n
+ By default, this will zero out non-veg land units.
+ To include non-veg land units, you need to specify the --include-nonveg flag.
+ To zero out non-veg land units, you also need to specify --dompft.
+
+ You should specify at least one of the following arguments:
+ --dompft DOMPFT
+ --include-nonveg
+ """
+ raise argparse.ArgumentTypeError(err_msg)
+
+ def check_pct_pft(self):
+ """
+ A function to error check pct_pft and calculate it if necessary.
+
+ If the user gives dom_pft and pct_pft :
+ - Check if length of dom_pft and pct_pft matches.
+ For example, --dompft 8 --pctpft 0.4 0.6 should give an error.
+
+ - Check if the sum of pct_pft is equal to 100% or 1.
+ For example, --dompft 8 14 --pctpft 0.6 0.9 should give an error.
+
+ - If the sum of pct_pft is 1, convert it to % (multiply by 100)
+
+ If the user gives one or more dom_pft but no pct_pft, assume equal pct_pft:
+ - pct_pft = 100 / number of given dom_pft
+ For example, if two dom_pft (s) are given, each of them is 50%.
+
+ """
+
+ # -- if both dom_pft and pct_pft is given:
+ if self.dom_pft and self.pct_pft:
+
+ # -- check if the same number of values are given
+ if len(self.dom_pft) != len(self.pct_pft):
+ err_msg = "Please provide the same number of inputs for --dompft and --pctpft."
+ raise argparse.ArgumentTypeError(err_msg)
+
+ # -- check if the sum of pct_pft is equal to 1 or 100
+ if sum(self.pct_pft) != 1 and sum(self.pct_pft) != 100:
+ err_msg = "Sum of --pctpft values should be equal to 1 or 100."
+ raise argparse.ArgumentTypeError(err_msg)
+
+ # -- convert fraction to percentage
+ if sum(self.pct_pft) == 1:
+ self.pct_pft = [pct * 100 for pct in self.pct_pft]
+
+ # -- if the user did not give --pctpft at all (assume equal percentage)
+ elif self.dom_pft:
+ pct = 100 / len(self.dom_pft)
+ self.pct_pft = [pct for _ in self.dom_pft]
+
+ # -- if the user only gave --pctpft with no --dompft
+ elif self.pct_pft:
+ err_msg = """
+ \n
+ --pctpft is specified without --dompft.
+ Please specify your dominant pft by --dompft.
+ """
+ raise argparse.ArgumentTypeError(err_msg)
+
+ logger.info(" - dominant pft(s) : %s", self.dom_pft)
+ logger.info(" - percentage of dominant pft(s) : %s", self.pct_pft)
+
+ def create_domain_at_point(self, indir, file):
+ """
+ Create domain file for this SinglePointCase class.
+ """
+ logger.info(
+ "----------------------------------------------------------------------"
+ )
+ logger.info(
+ "Creating domain file at %s, %s.", self.plon.__str__(), self.plat.__str__()
+ )
+
+ # specify files
+ fdomain_in = os.path.join(indir, file)
+ fdomain_out = add_tag_to_filename(fdomain_in, self.tag)
+ logger.info("fdomain_in: %s", fdomain_in)
+ logger.info("fdomain_out: %s", os.path.join(self.out_dir, fdomain_out))
+
+ # create 1d coordinate variables to enable sel() method
+ f_in = self.create_1d_coord(fdomain_in, "xc", "yc", "ni", "nj")
+
+ # extract gridcell closest to plon/plat
+ f_out = f_in.sel(ni=self.plon, nj=self.plat, method="nearest")
+
+ # expand dimensions
+ f_out = f_out.expand_dims(["nj", "ni"])
+
+ # update attributes
+ self.update_metadata(f_out)
+ f_out.attrs["Created_from"] = fdomain_in
+
+ wfile = os.path.join(self.out_dir, fdomain_out)
+ self.write_to_netcdf(f_out, wfile)
+ logger.info("Successfully created file (fdomain_out) %s", wfile)
+ f_in.close()
+ f_out.close()
+
+ def create_landuse_at_point(self, indir, file, user_mods_dir):
+ """
+ Create landuse file at a single point.
+ """
+ logger.info(
+ "----------------------------------------------------------------------"
+ )
+ logger.info(
+ "Creating land use file at %s, %s.",
+ self.plon,
+ self.plat,
+ )
+
+ # specify files
+ fluse_in = os.path.join(indir, file)
+ fluse_out = add_tag_to_filename(fluse_in, self.tag)
+ logger.info("fluse_in: %s", fluse_in)
+ logger.info("fluse_out: %s", os.path.join(self.out_dir, fluse_out))
+
+ # create 1d coordinate variables to enable sel() method
+ f_in = self.create_1d_coord(fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
+
+ # extract gridcell closest to plon/plat
+ f_out = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest")
+
+ # expand dimensions
+ f_out = f_out.expand_dims(["lsmlat", "lsmlon"])
+
+ # specify dimension order
+ f_out = f_out.transpose("time", "cft", "natpft", "lsmlat", "lsmlon")
+
+ # revert expand dimensions of YEAR
+ year = np.squeeze(np.asarray(f_out["YEAR"]))
+ temp_xr = xr.DataArray(
+ year, coords={"time": f_out["time"]}, dims="time", name="YEAR"
+ )
+ temp_xr.attrs["units"] = "unitless"
+ temp_xr.attrs["long_name"] = "Year of PFT data"
+ f_out["YEAR"] = temp_xr
+
+ # update attributes
+ self.update_metadata(f_out)
+ f_out.attrs["Created_from"] = fluse_in
+
+ wfile = os.path.join(self.out_dir, fluse_out)
+ self.write_to_netcdf(f_out, wfile)
+ logger.info("Successfully created file (fluse_out), %s", wfile)
+ f_in.close()
+ f_out.close()
+
+ # write to user_nl_clm data if specified
+ if self.create_user_mods:
+ with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm:
+ line = "flanduse_timeseries = '${}'".format(
+ os.path.join(USRDAT_DIR, fluse_out)
+ )
+ self.write_to_file(line, nl_clm)
+
+ def modify_surfdata_atpoint(self, f_orig):
+ """
+ Function to modify surface dataset based on the user flags chosen.
+ """
+ f_mod = f_orig.copy(deep=True)
+
+ # -- modify surface data properties
+ if self.dom_pft is not None:
+ max_dom_pft = max(self.dom_pft)
+ # -- First initialize everything:
+ if max_dom_pft < NAT_PFT:
+ f_mod["PCT_NAT_PFT"][:, :, :] = 0
+ else:
+ f_mod["PCT_CFT"][:, :, :] = 0
+
+ # Do we need to initialize these here?
+ # Because we set them in include_nonveg
+ # f_mod["PCT_NATVEG"][:, :] = 0
+ # f_mod["PCT_CROP"][:, :] = 0
+
+ # -- loop over all dom_pft and pct_pft
+ zip_pfts = zip(self.dom_pft, self.pct_pft)
+ for dom_pft, pct_pft in zip_pfts:
+ if dom_pft < NAT_PFT:
+ f_mod["PCT_NAT_PFT"][:, :, dom_pft] = pct_pft
+ else:
+ dom_pft = dom_pft - NAT_PFT
+ f_mod["PCT_CFT"][:, :, dom_pft] = pct_pft
+
+ # -------------------------------
+ # By default include_nonveg=False
+ # When we use --include-nonveg we turn it to True
+ # Therefore by default we are hitting the following if:
+
+ if not self.include_nonveg:
+ logger.info("Zeroing out non-vegetation land units in the surface data.")
+ f_mod["PCT_LAKE"][:, :] = 0.0
+ f_mod["PCT_WETLAND"][:, :] = 0.0
+ f_mod["PCT_URBAN"][:, :, :] = 0.0
+ f_mod["PCT_GLACIER"][:, :] = 0.0
+
+ max_dom_pft = max(self.dom_pft)
+ if max_dom_pft < NAT_PFT:
+ f_mod["PCT_NATVEG"][:, :] = 100
+ f_mod["PCT_CROP"][:, :] = 0
+ else:
+ f_mod["PCT_NATVEG"][:, :] = 0
+ f_mod["PCT_CROP"][:, :] = 100
+
+ else:
+ logger.info(
+ "You chose --include-nonveg --> \
+ Do not zero non-vegetation land units in the surface data."
+ )
+
+ if self.uni_snow:
+ f_mod["STD_ELEV"][:, :] = 20.0
+ if self.cap_saturation:
+ f_mod["FMAX"][:, :] = 0.0
+
+ return f_mod
+
+ def create_surfdata_at_point(self, indir, file, user_mods_dir):
+ """
+ Create surface data file at a single point.
+ """
+ # pylint: disable=too-many-statements
+ logger.info(
+ "----------------------------------------------------------------------"
+ )
+ logger.info(
+ "Creating surface dataset file at %s, %s",
+ self.plon,
+ self.plat,
+ )
+
+ # specify file
+ fsurf_in = os.path.join(indir, file)
+ fsurf_out = add_tag_to_filename(fsurf_in, self.tag)
+ logger.info("fsurf_in: %s", fsurf_in)
+ logger.info("fsurf_out: %s", os.path.join(self.out_dir, fsurf_out))
+
+ # create 1d coordinate variables to enable sel() method
+ f_in = self.create_1d_coord(fsurf_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
+
+ # extract gridcell closest to plon/plat
+ f_tmp = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest")
+
+ # expand dimensions
+ f_tmp = f_tmp.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True)
+
+ f_out = self.modify_surfdata_atpoint(f_tmp)
+
+ # specify dimension order
+ f_out = f_out.transpose(
+ "time",
+ "cft",
+ "lsmpft",
+ "natpft",
+ "nglcec",
+ "nglcecp1",
+ "nlevsoi",
+ "nlevurb",
+ "numrad",
+ "numurbl",
+ "lsmlat",
+ "lsmlon",
+ )
+
+ # update lsmlat and lsmlon to match site specific instead of the nearest point
+ # we do this so that if we create user_mods the PTS_LON and PTS_LAT in CIME match
+ # the surface data coordinates - which is required
+ f_out["lsmlon"] = np.atleast_1d(self.plon)
+ f_out["lsmlat"] = np.atleast_1d(self.plat)
+ f_out["LATIXY"][:, :] = self.plat
+ f_out["LONGXY"][:, :] = self.plon
+
+ # update attributes
+ self.update_metadata(f_out)
+ f_out.attrs["Created_from"] = fsurf_in
+
+ wfile = os.path.join(self.out_dir, fsurf_out)
+ self.write_to_netcdf(f_out, wfile)
+ logger.info("Successfully created file (fsurf_out) %s", wfile)
+ f_in.close()
+ f_tmp.close()
+ f_out.close()
+
+ # write to user_nl_clm if specified
+ if self.create_user_mods:
+ with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm:
+ line = "fsurdat = '${}'".format(os.path.join(USRDAT_DIR, fsurf_out))
+ self.write_to_file(line, nl_clm)
+
+ def create_datmdomain_at_point(self, datm_tuple: DatmFiles):
+ """
+ Create DATM domain file at a single point
+ """
+ logger.info(
+ "----------------------------------------------------------------------"
+ )
+ logger.info(
+ "Creating DATM domain file at %s, %s",
+ self.plon,
+ self.plat,
+ )
+
+ # specify files
+ fdatmdomain_in = os.path.join(datm_tuple.indir, datm_tuple.fdomain_in)
+ datm_file = add_tag_to_filename(fdatmdomain_in, self.tag)
+ fdatmdomain_out = os.path.join(datm_tuple.outdir, datm_file)
+ logger.info("fdatmdomain_in: %s", fdatmdomain_in)
+ logger.info("fdatmdomain out: %s", os.path.join(self.out_dir, fdatmdomain_out))
+
+ # create 1d coordinate variables to enable sel() method
+ f_in = self.create_1d_coord(fdatmdomain_in, "xc", "yc", "ni", "nj")
+
+ # extract gridcell closest to plon/plat
+ f_out = f_in.sel(ni=self.plon, nj=self.plat, method="nearest")
+
+ # expand dimensions
+ f_out = f_out.expand_dims(["nj", "ni"])
+
+ # update attributes
+ self.update_metadata(f_out)
+ f_out.attrs["Created_from"] = fdatmdomain_in
+
+ wfile = os.path.join(self.out_dir, fdatmdomain_out)
+ self.write_to_netcdf(f_out, wfile)
+ logger.info("Successfully created file (fdatmdomain_out) : %s", wfile)
+ f_in.close()
+ f_out.close()
+
+ def extract_datm_at(self, file_in, file_out):
+ """
+ Create a DATM dataset at a point.
+ """
+ # create 1d coordinate variables to enable sel() method
+ f_in = self.create_1d_coord(file_in, "LONGXY", "LATIXY", "lon", "lat")
+
+ # extract gridcell closest to plon/plat
+ f_out = f_in.sel(lon=self.plon, lat=self.plat, method="nearest")
+
+ # expand dimensions
+ f_out = f_out.expand_dims(["lat", "lon"])
+
+ # specify dimension order
+ f_out = f_out.transpose("scalar", "time", "lat", "lon")
+
+ # update attributes
+ self.update_metadata(f_out)
+ f_out.attrs["Created_from"] = file_in
+
+ self.write_to_netcdf(f_out, file_out)
+ logger.info("Successfully created file : %s", file_out)
+ f_in.close()
+ f_out.close()
+
+ def write_shell_commands(self, file):
+ """
+ writes out xml change commands to a file (i.e. shell_commands) for single-point runs
+ """
+ # write_to_file surrounds text with newlines
+ with open(file, "w") as nl_file:
+ self.write_to_file(
+ "# Change below line if you move the subset data directory", nl_file
+ )
+ self.write_to_file(
+ "./xmlchange {}={}".format(USRDAT_DIR, self.out_dir), nl_file
+ )
+ self.write_to_file("./xmlchange PTS_LON={}".format(str(self.plon)), nl_file)
+ self.write_to_file("./xmlchange PTS_LAT={}".format(str(self.plat)), nl_file)
+ self.write_to_file("./xmlchange MPILIB=mpi-serial", nl_file)
+
+ def write_datm_streams_lines(self, streamname, datmfiles, file):
+ """
+ writes out lines for the user_nl_datm_streams file for a specific DATM stream
+ for using subset DATM data at a single point
+
+ streamname - stream name (e.g. TPQW)
+ datmfiles - comma-separated list (str) of DATM file names
+ file - file connection to user_nl_datm_streams file
+ """
+ self.write_to_file(
+ "{}:datafiles={}".format(streamname, ",".join(datmfiles)), file
+ )
+ self.write_to_file("{}:mapalgo=none".format(streamname), file)
+ self.write_to_file("{}:meshfile=none".format(streamname), file)
+
+ def create_datm_at_point(
+ self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_streams_file
+ ):
+ """
+ Create all of a DATM dataset at a point.
+ """
+ logger.info(
+ "----------------------------------------------------------------------"
+ )
+ logger.info(
+ "Creating DATM files at %s, %s", self.plon.__str__(), self.plat.__str__()
+ )
+
+ # -- create data files
+ infile = []
+ outfile = []
+ solarfiles = []
+ precfiles = []
+ tpqwfiles = []
+ for year in range(datm_syr, datm_eyr + 1):
+ ystr = str(year)
+ for month in range(FIRST_MONTH, LAST_MONTH + 1):
+ mstr = str(month)
+ if month < 10:
+ mstr = "0" + mstr
+
+ dtag = ystr + "-" + mstr
+
+ fsolar = os.path.join(
+ datm_tuple.indir,
+ datm_tuple.dir_solar,
+ "{}{}.nc".format(datm_tuple.tag_solar, dtag),
+ )
+ fsolar2 = "{}{}.{}.nc".format(datm_tuple.tag_solar, self.tag, dtag)
+ fprecip = os.path.join(
+ datm_tuple.indir,
+ datm_tuple.dir_prec,
+ "{}{}.nc".format(datm_tuple.tag_prec, dtag),
+ )
+ fprecip2 = "{}{}.{}.nc".format(datm_tuple.tag_prec, self.tag, dtag)
+ ftpqw = os.path.join(
+ datm_tuple.indir,
+ datm_tuple.dir_tpqw,
+ "{}{}.nc".format(datm_tuple.tag_tpqw, dtag),
+ )
+ ftpqw2 = "{}{}.{}.nc".format(datm_tuple.tag_tpqw, self.tag, dtag)
+
+ outdir = os.path.join(self.out_dir, datm_tuple.outdir)
+ infile += [fsolar, fprecip, ftpqw]
+ outfile += [
+ os.path.join(outdir, fsolar2),
+ os.path.join(outdir, fprecip2),
+ os.path.join(outdir, ftpqw2),
+ ]
+ solarfiles.append(
+ os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fsolar2)
+ )
+ precfiles.append(
+ os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fprecip2)
+ )
+ tpqwfiles.append(
+ os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, ftpqw2)
+ )
+
+ for idx, out_f in enumerate(outfile):
+ logger.debug(out_f)
+ self.extract_datm_at(infile[idx], out_f)
+
+ logger.info("All DATM files are created in: %s", datm_tuple.outdir)
+
+ # write to user_nl_datm_streams if specified
+ if self.create_user_mods:
+ with open(datm_streams_file, "a") as file:
+ self.write_datm_streams_lines(datm_tuple.name_solar, solarfiles, file)
+ self.write_datm_streams_lines(datm_tuple.name_prec, precfiles, file)
+ self.write_datm_streams_lines(datm_tuple.name_tpqw, tpqwfiles, file)
diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py
new file mode 100644
index 0000000000..a1b8d21c15
--- /dev/null
+++ b/python/ctsm/subset_data.py
@@ -0,0 +1,582 @@
+"""
+|------------------------------------------------------------------|
+|--------------------- Instructions -----------------------------|
+|------------------------------------------------------------------|
+Instructions for running on Cheyenne/Casper:
+load the following into your local environment
+ module load python
+ ncar_pylib
+-------------------------------------------------------------------
+To see the available options for single point or regional cases:
+ ./subset_data.py --help
+-------------------------------------------------------------------
+This script extracts domain files, surface dataset, and DATM files
+at either a single point or a region using a global dataset. Currently this
+script subsets default surface, landuse, and DATM files, which can be seen in
+the defaults.cfg file.
+
+To run a single-point or regional case using this data with the NUOPC driver,
+you must update the variable(s) `fsurdat` and/or `landuse` in the user_nl_clm namelist
+file to be the full path to the subset files. This script will automatically create this
+file using the flag --create-user-mods.
+To use subset climate data, the namelist file user_nl_datm_streams must also
+be updated - this script will automatically create this file with
+--create-user-mods. This flag will also create necessary single-point xml
+commands in the file shell_commands.
+
+To use the created user mods with a case, pass --user-mods-dir PATH/TO/USER/MODS
+to the ./create_newcase call.
+
+By default, this script only extracts the surface dataset. To extract other
+files, use the appropriate flags.
+
+To run this script the following packages are required:
+ - numpy
+ - xarray
+
+-------------------------------------------------------------------
+To run the script for a single point:
+ ./subset_data.py point
+
+To run the script for a region:
+ ./subset_data.py region
+
+To remove NPL from your environment on Cheyenne/Casper:
+ deactivate
+-------------------------------------------------------------------
+"""
+
+# TODO [NS]:
+# -[] Automatically download missing files
+
+# -- Import libraries
+
+# -- standard libraries
+import os
+import logging
+import argparse
+import textwrap
+import configparser
+
+from getpass import getuser
+from argparse import ArgumentParser
+
+# -- import local classes for this script
+from ctsm.site_and_regional.base_case import DatmFiles
+from ctsm.site_and_regional.single_point_case import SinglePointCase
+from ctsm.site_and_regional.regional_case import RegionalCase
+from ctsm.args_utils import plon_type, plat_type
+from ctsm.path_utils import path_to_ctsm_root
+
+# -- import ctsm logging flags
+from ctsm.ctsm_logging import (
+ setup_logging_pre_config,
+ add_logging_args,
+ process_logging_args,
+)
+
+DEFAULTS_FILE = "default_data.cfg"
+
+logger = logging.getLogger(__name__)
+
+
+def get_parser():
+ """
+ Get the parser object for subset_data.py script.
+
+ Returns:
+ parser (ArgumentParser): ArgumentParser which includes all the parser information.
+
+ """
+ parser = ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+
+ parser.print_usage = parser.print_help
+ subparsers = parser.add_subparsers(
+ help="Two possible ways to run this script, either:", dest="run_type"
+ )
+ pt_parser = subparsers.add_parser("point", help="Run script for a single point.")
+ rg_parser = subparsers.add_parser("region", help="Run script for a region.")
+
+ # -- single point parser options
+ pt_parser.add_argument(
+ "--lat",
+ help="Single point latitude. [default: %(default)s]",
+ action="store",
+ dest="plat",
+ required=False,
+ type=plat_type,
+ default=42.5,
+ )
+ pt_parser.add_argument(
+ "--lon",
+ help="Single point longitude. [default: %(default)s]",
+ action="store",
+ dest="plon",
+ required=False,
+ type=plon_type,
+ default=287.8,
+ )
+ pt_parser.add_argument(
+ "--site",
+ help="Site name or tag. [default: %(default)s]",
+ action="store",
+ dest="site_name",
+ required=False,
+ type=str,
+ default="",
+ )
+ pt_parser.add_argument(
+ "--uniform-snowpack",
+ help="Modify surface data to have a uniform snow fraction.",
+ action="store_true",
+ dest="uni_snow",
+ required=False,
+ )
+ pt_parser.add_argument(
+ "--include-nonveg",
+ help="Do not zero non-vegetation land units in the surface data.",
+ action="store_true",
+ dest="include_nonveg",
+ required=False,
+ )
+ pt_parser.add_argument(
+ "--cap-saturation",
+ help="Modify surface data to not allow saturation excess.",
+ action="store_true",
+ dest="cap_saturation",
+ required=False,
+ )
+ pt_parser.add_argument(
+ "--dompft",
+ help="Dominant PFT(s): if we set the grid to 100%% one or multiple PFTs \
+ [default: %(default)s].",
+ action="store",
+ dest="dom_pft",
+ type=int,
+ default=None,
+ nargs='*',
+ )
+ pt_parser.add_argument(
+ "--pctpft",
+ help="Percetages of each pft (set by --dompft) on the land unit.",
+ action="store",
+ dest="pct_pft",
+ type=float,
+ default=None,
+ nargs='*',
+ )
+ # -- region-specific parser options
+ rg_parser.add_argument(
+ "--lat1",
+ help="Region start latitude. [default: %(default)s]",
+ action="store",
+ dest="lat1",
+ required=False,
+ type=plat_type,
+ default=-40,
+ )
+ rg_parser.add_argument(
+ "--lat2",
+ help="Region end latitude. [default: %(default)s]",
+ action="store",
+ dest="lat2",
+ required=False,
+ type=plat_type,
+ default=15,
+ )
+ rg_parser.add_argument(
+ "--lon1",
+ help="Region start longitude. [default: %(default)s]",
+ action="store",
+ dest="lon1",
+ required=False,
+ type=plon_type,
+ default=275.0,
+ )
+ rg_parser.add_argument(
+ "--lon2",
+ help="Region end longitude. [default: %(default)s]",
+ action="store",
+ dest="lon2",
+ required=False,
+ type=plon_type,
+ default=330.0,
+ )
+ rg_parser.add_argument(
+ "--reg",
+ help="Region name or tag. [default: %(default)s]",
+ action="store",
+ dest="reg_name",
+ required=False,
+ type=str,
+ default="",
+ )
+ rg_parser.add_argument(
+ "--create-mesh",
+ help="Subset a mesh file for a region.",
+ action="store_true",
+ dest="create_mesh",
+ required=False,
+ )
+
+ # -- common options between both subparsers
+ for subparser in [pt_parser, rg_parser]:
+ subparser.add_argument(
+ "--create-domain",
+ help="Create CLM domain file at single point/region. \
+ Domain files are not needed for NUOPC cases.",
+ action="store_true",
+ dest="create_domain",
+ required=False,
+ )
+ subparser.add_argument(
+ "--create-surface",
+ help="Create surface data file at single point/region.",
+ action="store_true",
+ dest="create_surfdata",
+ required=False,
+ )
+ subparser.add_argument(
+ "--create-landuse",
+ help="Create landuse data file at single point/region.",
+ action="store_true",
+ dest="create_landuse",
+ required=False,
+ )
+ subparser.add_argument(
+ "--create-datm",
+ help="Create DATM forcing data at single point.",
+ action="store_true",
+ dest="create_datm",
+ required=False,
+ )
+ subparser.add_argument(
+ "--create-user-mods",
+ help="Create user mods directories and files for running CTSM with the subset data.",
+ action="store_true",
+ dest="create_user_mods",
+ required=False,
+ )
+ subparser.add_argument(
+ "--datm-syr",
+ help="Start year for creating DATM forcing at single point/region. [default: %("
+ "default)s]",
+ action="store",
+ dest="datm_syr",
+ required=False,
+ type=int,
+ default=1901,
+ )
+ subparser.add_argument(
+ "--datm-eyr",
+ help="End year for creating DATM forcing at single point/region. "
+ "[default: %(default)s]",
+ action="store",
+ dest="datm_eyr",
+ required=False,
+ type=int,
+ default=2014,
+ )
+ subparser.add_argument(
+ "--crop",
+ help="Create datasets using the extensive list of prognostic crop types.",
+ action="store_true",
+ dest="crop_flag",
+ required=False,
+ )
+
+ if subparser == pt_parser:
+ parser_name = "single_point"
+ else:
+ parser_name = "regional"
+
+ subparser.add_argument(
+ "--outdir",
+ help="Output directory. \n [default: %(default)s]",
+ action="store",
+ dest="out_dir",
+ type=str,
+ default=os.path.join(os.getcwd(), "subset_data_" + parser_name),
+ )
+ subparser.add_argument(
+ "--user-mods-dir",
+ help="User mods directory.",
+ action="store",
+ dest="user_mods_dir",
+ type=str,
+ default="",
+ )
+ subparser.add_argument(
+ "--overwrite",
+ help="Flag to overwrite if the files already exists.",
+ action="store_true",
+ dest="overwrite",
+ )
+ add_logging_args(subparser)
+
+ # -- print help for both subparsers
+ parser.epilog = textwrap.dedent(
+ f"""\
+ {pt_parser.format_help()}
+ {rg_parser.format_help()}
+ """
+ )
+ return parser
+
+
+def setup_user_mods(user_mods_dir, cesmroot):
+ """
+ Sets up the user mods files and directories
+ """
+ if not os.path.isdir(user_mods_dir):
+ os.mkdir(user_mods_dir)
+
+ nl_clm_base = os.path.join(cesmroot, "cime_config/user_nl_clm")
+ nl_clm = os.path.join(user_mods_dir, "user_nl_clm")
+ with open(nl_clm_base, "r") as basefile, open(nl_clm, "w") as user_file:
+ for line in basefile:
+ user_file.write(line)
+
+ nl_datm_base = os.path.join(cesmroot, "components/cdeps/datm/cime_config"
+ "/user_nl_datm_streams")
+ nl_datm = os.path.join(user_mods_dir, "user_nl_datm_streams")
+ with open(nl_datm_base, "r") as base_file, open(nl_datm, 'w') as user_file:
+ for line in base_file:
+ user_file.write(line)
+
+
+def determine_num_pft(crop):
+    """
+    A simple function to determine the number of pfts.
+
+    Args:
+        crop (bool): crop flag denoting if we are using crop
+
+    Returns:
+        num_pft (str): number of pfts for the surface dataset, as a string
+    """
+    if crop:
+        num_pft = "78"
+    else:
+        num_pft = "16"
+    logger.debug("crop_flag = %s => num_pft = %s", crop, num_pft)
+    return num_pft
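+
+# Usage note: determine_num_pft(True) returns "78" and determine_num_pft(False)
+# returns "16". The value is kept as a string because setup_files() builds
+# defaults-file keys by concatenation, e.g. "surfdat_" + num_pft + "pft".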
+
+
+def setup_files(args, defaults, cesmroot):
+ """
+ Sets up the files and folders needed for this program
+ """
+
+ if args.user_mods_dir == "":
+ args.user_mods_dir = os.path.join(args.out_dir, "user_mods")
+ if not os.path.isdir(args.out_dir):
+ os.mkdir(args.out_dir)
+
+ if args.create_user_mods:
+ setup_user_mods(args.user_mods_dir, cesmroot)
+
+ # DATM data
+ datm_type = 'datm_gswp3'
+ dir_output_datm = "datmdata"
+ dir_input_datm = defaults.get(datm_type, "dir")
+ if args.create_datm:
+ if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)):
+ os.mkdir(os.path.join(args.out_dir, dir_output_datm))
+ logger.info("dir_input_datm : %s", dir_input_datm)
+ logger.info("dir_output_datm: %s", os.path.join(args.out_dir, dir_output_datm))
+
+ # if the crop flag is on - we need to use a different land use and surface data file
+ num_pft = determine_num_pft(args.crop_flag)
+
+ fsurf_in = defaults.get("surfdat", "surfdat_"+num_pft+"pft")
+ fluse_in = defaults.get("landuse", "landuse_"+num_pft+"pft")
+
+    file_dict = {'main_dir': defaults.get("main", "clmforcingindir"),
+                 'fdomain_in': defaults.get("domain", "file"),
+                 'fsurf_dir': os.path.join(defaults.get("main", "clmforcingindir"),
+                                           defaults.get("surfdat", "dir")),
+                 'fluse_dir': os.path.join(defaults.get("main", "clmforcingindir"),
+                                           defaults.get("landuse", "dir")),
+ 'fsurf_in': fsurf_in,
+ 'fluse_in': fluse_in,
+ 'datm_tuple': DatmFiles(dir_input_datm,
+ dir_output_datm,
+ defaults.get(datm_type, "domain"),
+ defaults.get(datm_type, 'solardir'),
+ defaults.get(datm_type, 'precdir'),
+ defaults.get(datm_type, 'tpqwdir'),
+ defaults.get(datm_type, 'solartag'),
+ defaults.get(datm_type, 'prectag'),
+ defaults.get(datm_type, 'tpqwtag'),
+ defaults.get(datm_type, 'solarname'),
+ defaults.get(datm_type, 'precname'),
+ defaults.get(datm_type, 'tpqwname'))
+ }
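+    # Note: DatmFiles is assumed to be a namedtuple bundling, in order, the
+    # DATM input/output dirs, the DATM domain file, and the per-stream
+    # directories, file tags, and stream names for the solar, precipitation,
+    # and TPQW forcing (create_datm_at_point reads fields such as
+    # datm_tuple.indir, datm_tuple.dir_solar, and datm_tuple.tag_solar).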
+
+ return file_dict
+
+
+def subset_point(args, file_dict: dict):
+ """
+ Subsets surface, domain, land use, and/or DATM files at a single point
+ """
+
+ logger.info("----------------------------------------------------------------------------")
+ logger.info("This script extracts a single point from the global CTSM datasets.")
+
+ num_pft = int(determine_num_pft(args.crop_flag))
+
+ # -- Create SinglePoint Object
+    single_point = SinglePointCase(
+        plat=args.plat,
+        plon=args.plon,
+        site_name=args.site_name,
+        create_domain=args.create_domain,
+        create_surfdata=args.create_surfdata,
+        create_landuse=args.create_landuse,
+        create_datm=args.create_datm,
+        create_user_mods=args.create_user_mods,
+        dom_pft=args.dom_pft,
+        pct_pft=args.pct_pft,
+        num_pft=num_pft,
+        include_nonveg=args.include_nonveg,
+        uni_snow=args.uni_snow,
+        cap_saturation=args.cap_saturation,
+        out_dir=args.out_dir,
+        overwrite=args.overwrite,
+    )
+
+ logger.debug(single_point)
+
+ # -- Create CTSM domain file
+ if single_point.create_domain:
+ single_point.create_domain_at_point(file_dict["main_dir"], file_dict["fdomain_in"])
+
+ # -- Create CTSM surface data file
+ if single_point.create_surfdata:
+ single_point.create_surfdata_at_point(file_dict["fsurf_dir"], file_dict["fsurf_in"],
+ args.user_mods_dir)
+
+ # -- Create CTSM transient landuse data file
+ if single_point.create_landuse:
+ single_point.create_landuse_at_point(file_dict["fluse_dir"], file_dict["fluse_in"],
+ args.user_mods_dir)
+
+ # -- Create single point atmospheric forcing data
+ if single_point.create_datm:
+ # subset DATM domain file
+ single_point.create_datmdomain_at_point(file_dict["datm_tuple"])
+
+ # subset the DATM data
+ nl_datm = os.path.join(args.user_mods_dir, "user_nl_datm_streams")
+ single_point.create_datm_at_point(file_dict['datm_tuple'], args.datm_syr, args.datm_eyr,
+ nl_datm)
+
+ # -- Write shell commands
+ if single_point.create_user_mods:
+ single_point.write_shell_commands(os.path.join(args.user_mods_dir, "shell_commands"))
+
+ logger.info("Successfully ran script for single point.")
+
+
+def subset_region(args, file_dict: dict):
+ """
+ Subsets surface, domain, land use, and/or DATM files for a region
+ """
+
+ logger.info("----------------------------------------------------------------------------")
+ logger.info("This script extracts a region from the global CTSM datasets.")
+
+ # -- Create Region Object
+    region = RegionalCase(
+        lat1=args.lat1,
+        lat2=args.lat2,
+        lon1=args.lon1,
+        lon2=args.lon2,
+        reg_name=args.reg_name,
+        create_domain=args.create_domain,
+        create_surfdata=args.create_surfdata,
+        create_landuse=args.create_landuse,
+        create_datm=args.create_datm,
+        create_user_mods=args.create_user_mods,
+        out_dir=args.out_dir,
+        overwrite=args.overwrite,
+    )
+
+ logger.debug(region)
+
+ # -- Create CTSM domain file
+ if region.create_domain:
+ region.create_domain_at_reg(file_dict["main_dir"], file_dict["fdomain_in"])
+
+ # -- Create CTSM surface data file
+ if region.create_surfdata:
+ region.create_surfdata_at_reg(file_dict["fsurf_dir"], file_dict["fsurf_in"],
+ args.user_mods_dir)
+
+ # -- Create CTSM transient landuse data file
+ if region.create_landuse:
+ region.create_landuse_at_reg(file_dict["fluse_dir"], file_dict["fluse_in"],
+ args.user_mods_dir)
+
+ logger.info("Successfully ran script for a regional case.")
+
+
+def main():
+ """
+ Calls functions that subset surface, landuse, domain, and/or DATM files for a region or a
+ single point.
+ """
+
+ # --------------------------------- #
+ # add logging flags from ctsm_logging
+ setup_logging_pre_config()
+ parser = get_parser()
+ args = parser.parse_args()
+
+ # --------------------------------- #
+ # print help and exit when no option is chosen
+    if args.run_type not in ("point", "region"):
+        err_msg = textwrap.dedent('''\
+            \n ------------------------------------
+            \n Must supply a positional argument: 'point' or 'region'.
+            '''
+        )
+        parser.error(err_msg)
+
+ if not any([args.create_surfdata, args.create_domain, args.create_landuse, args.create_datm]):
+ err_msg = textwrap.dedent('''\
+ \n ------------------------------------
+ \n Must supply one of:
+ \n --create-surface \n --create-landuse \n --create-datm \n --create-domain \n
+ '''
+ )
+        parser.error(err_msg)
+
+ # --------------------------------- #
+ # process logging args (i.e. debug and verbose)
+ process_logging_args(args)
+
+ # --------------------------------- #
+ # parse defaults file
+ cesmroot = path_to_ctsm_root()
+ defaults = configparser.ConfigParser()
+ defaults.read(os.path.join(cesmroot, "tools/site_and_regional", DEFAULTS_FILE))
+
+ # --------------------------------- #
+ myname = getuser()
+ pwd = os.getcwd()
+ logger.info("User = %s", myname)
+ logger.info("Current directory = %s", pwd)
+
+ # --------------------------------- #
+ # create files and folders necessary and return dictionary of file/folder locations
+ file_dict = setup_files(args, defaults, cesmroot)
+
+ if args.run_type == "point":
+ subset_point(args, file_dict)
+ elif args.run_type == "region":
+ subset_region(args, file_dict)
diff --git a/python/ctsm/test/test_sys_fsurdat_modifier.py b/python/ctsm/test/test_sys_fsurdat_modifier.py
index 7d2819261c..5754269d59 100755
--- a/python/ctsm/test/test_sys_fsurdat_modifier.py
+++ b/python/ctsm/test/test_sys_fsurdat_modifier.py
@@ -25,6 +25,15 @@ class TestSysFsurdatModifier(unittest.TestCase):
"""System tests for fsurdat_modifier"""
def setUp(self):
+ """
+ Obtain path to the existing:
+ - modify_template.cfg file
+ - /testinputs directory and fsurdat_in, located in /testinputs
+ Make /_tempdir for use by these tests.
+ Obtain path and names for the files being created in /_tempdir:
+ - modify_fsurdat.cfg
+ - fsurdat_out.nc
+ """
self._cfg_template_path = os.path.join(path_to_ctsm_root(),
'tools/modify_fsurdat/modify_template.cfg')
testinputs_path = os.path.join(path_to_ctsm_root(),
@@ -44,6 +53,7 @@ def tearDown(self):
def test_minimalInfo(self):
"""
This test specifies a minimal amount of information
+ Create .cfg file, run the tool, compare fsurdat_in to fsurdat_out
"""
self._create_config_file_minimal()
@@ -59,9 +69,37 @@ def test_minimalInfo(self):
self.assertTrue(fsurdat_out_data.equals(fsurdat_in_data))
+ def test_crop(self):
+ """
+ This version replaces the vegetation with a crop
+ Create .cfg file, run the tool, compare fsurdat_in to fsurdat_out
+ """
+
+ self._create_config_file_crop()
+
+ # run the fsurdat_modifier tool
+ fsurdat_modifier(self._cfg_file_path)
+ # the critical piece of this test is that the above command
+ # doesn't generate errors; however, we also do some assertions below
+
+ # compare fsurdat_out to fsurdat_in
+ fsurdat_in_data = xr.open_dataset(self._fsurdat_in)
+ fsurdat_out_data = xr.open_dataset(self._fsurdat_out)
+ # assert that fsurdat_out does not equal fsurdat_in
+ self.assertFalse(fsurdat_out_data.equals(fsurdat_in_data))
+
+ # compare fsurdat_out to fsurdat_out_baseline located in /testinputs
+ fsurdat_out_baseline = self._fsurdat_in[:-3] + '_modified_with_crop' + \
+ self._fsurdat_in[-3:]
+ fsurdat_out_base_data = xr.open_dataset(fsurdat_out_baseline)
+ # assert that fsurdat_out equals fsurdat_out_baseline
+ self.assertTrue(fsurdat_out_data.equals(fsurdat_out_base_data))
+
+
def test_allInfo(self):
"""
This version specifies all possible information
+ Create .cfg file, run the tool, compare fsurdat_in to fsurdat_out
"""
self._create_config_file_complete()
@@ -77,7 +115,7 @@ def test_allInfo(self):
# assert that fsurdat_out does not equal fsurdat_in
self.assertFalse(fsurdat_out_data.equals(fsurdat_in_data))
- # compare fsurdat_out to fsurdat_out_baseline
+ # compare fsurdat_out to fsurdat_out_baseline located in /testinputs
fsurdat_out_baseline = self._fsurdat_in[:-3] + '_modified' + \
self._fsurdat_in[-3:]
fsurdat_out_base_data = xr.open_dataset(fsurdat_out_baseline)
@@ -86,26 +124,68 @@ def test_allInfo(self):
def _create_config_file_minimal(self):
+ """
+ Open the new and the template .cfg files
+ Loop line by line through the template .cfg file
+        When a string matches, replace that line's content
+ """
+ with open (self._cfg_file_path, 'w', encoding='utf-8') as cfg_out:
+ with open (self._cfg_template_path, 'r', encoding='utf-8') as cfg_in:
+ for line in cfg_in:
+ if re.match(r' *fsurdat_in *=', line):
+ line = f'fsurdat_in = {self._fsurdat_in}'
+ elif re.match(r' *fsurdat_out *=', line):
+ line = f'fsurdat_out = {self._fsurdat_out}'
+ cfg_out.write(line)
+
- with open (self._cfg_file_path,'w') as cfg_out:
- with open (self._cfg_template_path,'r') as cfg_in:
+ def _create_config_file_crop(self):
+ """
+ Open the new and the template .cfg files
+ Loop line by line through the template .cfg file
+    When a string matches, replace that line's content
+ """
+ with open (self._cfg_file_path, 'w', encoding='utf-8') as cfg_out:
+ with open (self._cfg_template_path, 'r', encoding='utf-8') as cfg_in:
for line in cfg_in:
if re.match(r' *fsurdat_in *=', line):
- line = 'fsurdat_in = {}'.format(self._fsurdat_in)
+ line = f'fsurdat_in = {self._fsurdat_in}'
elif re.match(r' *fsurdat_out *=', line):
- line = 'fsurdat_out = {}'.format(self._fsurdat_out)
+ line = f'fsurdat_out = {self._fsurdat_out}'
+ elif re.match(r' *lnd_lat_1 *=', line):
+ line = 'lnd_lat_1 = -10\n'
+ elif re.match(r' *lnd_lat_2 *=', line):
+ line = 'lnd_lat_2 = -7\n'
+ elif re.match(r' *lnd_lon_1 *=', line):
+ line = 'lnd_lon_1 = 295\n'
+ elif re.match(r' *lnd_lon_2 *=', line):
+ line = 'lnd_lon_2 = 300\n'
+ elif re.match(r' *dom_plant *=', line):
+ line = 'dom_plant = 15'
+ elif re.match(r' *lai *=', line):
+ line = 'lai = 0 1 2 3 4 5 5 4 3 2 1 0\n'
+ elif re.match(r' *sai *=', line):
+ line = 'sai = 1 1 1 1 1 1 1 1 1 1 1 1\n'
+ elif re.match(r' *hgt_top *=', line):
+ line = 'hgt_top = 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5\n'
+ elif re.match(r' *hgt_bot *=', line):
+ line = 'hgt_bot = 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1\n'
cfg_out.write(line)
def _create_config_file_complete(self):
-
- with open (self._cfg_file_path,'w') as cfg_out:
- with open (self._cfg_template_path,'r') as cfg_in:
+ """
+ Open the new and the template .cfg files
+ Loop line by line through the template .cfg file
+    When a string matches, replace that line's content
+ """
+ with open (self._cfg_file_path, 'w', encoding='utf-8') as cfg_out:
+ with open (self._cfg_template_path, 'r', encoding='utf-8') as cfg_in:
for line in cfg_in:
if re.match(r' *fsurdat_in *=', line):
- line = 'fsurdat_in = {}'.format(self._fsurdat_in)
+ line = f'fsurdat_in = {self._fsurdat_in}'
elif re.match(r' *fsurdat_out *=', line):
- line = 'fsurdat_out = {}'.format(self._fsurdat_out)
+ line = f'fsurdat_out = {self._fsurdat_out}'
elif re.match(r' *idealized *=', line):
line = 'idealized = True'
elif re.match(r' *lnd_lat_1 *=', line):
@@ -116,8 +196,8 @@ def _create_config_file_complete(self):
line = 'lnd_lon_1 = 295\n'
elif re.match(r' *lnd_lon_2 *=', line):
line = 'lnd_lon_2 = 300\n'
- elif re.match(r' *dom_nat_pft *=', line):
- line = 'dom_nat_pft = 1'
+ elif re.match(r' *dom_plant *=', line):
+ line = 'dom_plant = 1'
elif re.match(r' *lai *=', line):
line = 'lai = 0 1 2 3 4 5 5 4 3 2 1 0\n'
elif re.match(r' *sai *=', line):
diff --git a/python/ctsm/test/test_unit_args_utils.py b/python/ctsm/test/test_unit_args_utils.py
new file mode 100755
index 0000000000..73aab9f6a8
--- /dev/null
+++ b/python/ctsm/test/test_unit_args_utils.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+"""
+Unit tests for args_utils.py functions and types.
+
+You can run this by:
+ python -m unittest test_unit_args_utils.py
+"""
+
+import os
+import sys
+import unittest
+import argparse
+
+# -- add python/ctsm to path (needed if we want to run the test stand-alone)
+_CTSM_PYTHON = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir)
+sys.path.insert(1, _CTSM_PYTHON)
+
+#pylint: disable=wrong-import-position
+from ctsm.args_utils import plon_type, plat_type
+from ctsm import unit_testing
+
+# pylint: disable=invalid-name
+
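+# These tests encode the assumed plon_type convention: longitudes in [-180, 0)
+# are shifted by +360 into [180, 360), values in [0, 360] pass through
+# unchanged, and anything outside [-180, 360] raises ArgumentTypeError.
+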
+class TestArgsPlon(unittest.TestCase):
+ """
+    Tests for plon_type in args_utils.py
+ """
+
+ # --between 0-360
+ def test_plonType_positive(self):
+ """
+ Test of positive plon between 0 and 360
+ """
+ result = plon_type(30)
+ self.assertEqual(result, 30.0)
+
+ # --between -180-0
+ def test_plonType_negative(self):
+ """
+ Test of negative plon between -180 and 0
+ """
+ result = plon_type(-30)
+ self.assertEqual(result, 330.0)
+
+ # -- > 360
+ def test_plonType_outOfBounds_positive(self):
+ """
+ Test of plon values greater than 360
+ """
+ with self.assertRaisesRegex(
+ argparse.ArgumentTypeError, "Longitude.*should be between"
+ ):
+ _ = plon_type(360.5)
+
+ # -- < -180
+ def test_plonType_outOfBounds_negative(self):
+ """
+ Test of plon values smaller than -180
+ """
+ with self.assertRaisesRegex(
+ argparse.ArgumentTypeError, "Longitude.*should be between"
+ ):
+ _ = plon_type(-200)
+
+ # -- = -180
+ def test_plonType_negative_180(self):
+ """
+ Test for when plon values are -180
+ """
+ result = plon_type(-180)
+ self.assertEqual(result, 180.0)
+
+ # -- = 0
+ def test_plonType_zero(self):
+ """
+ Test for when plon values are 0
+ """
+ result = plon_type(0)
+ self.assertEqual(result, 0)
+
+ # -- = 360
+ def test_plonType_positive_360(self):
+ """
+ Test for when plon values are 360.
+ """
+ result = plon_type(360)
+ self.assertEqual(result, 360.0)
+
+class TestArgsPlat(unittest.TestCase):
+ """
+    Tests for plat_type in args_utils.py
+ """
+ def test_platType_outOfBounds_positive(self):
+ """
+ Test of plat_type bigger than 90
+ """
+ with self.assertRaisesRegex(
+ argparse.ArgumentTypeError, "Latitude.*should be between"
+ ):
+ _ = plat_type(91)
+
+    def test_platType_pos90(self):
+        """
+        Test plat_type when the value is exactly 90 (in bounds)
+        """
+        result = plat_type(90)
+        self.assertEqual(result, 90.0)
+
+    def test_platType_neg90(self):
+        """
+        Test plat_type when the value is exactly -90 (in bounds)
+        """
+        result = plat_type(-90)
+        self.assertEqual(result, -90.0)
+
+ def test_platType_outOfBounds_negative(self):
+ """
+ Test of plat_type smaller than -90
+ """
+ with self.assertRaisesRegex(
+ argparse.ArgumentTypeError, "Latitude.*should be between"
+ ):
+ _ = plat_type(-91)
+
+if __name__ == "__main__":
+ unit_testing.setup_for_tests()
+ unittest.main()
diff --git a/python/ctsm/test/test_unit_machine.py b/python/ctsm/test/test_unit_machine.py
index 6a2f7ac172..bc1a6f777d 100755
--- a/python/ctsm/test/test_unit_machine.py
+++ b/python/ctsm/test/test_unit_machine.py
@@ -9,7 +9,8 @@
from ctsm import add_cime_to_path # pylint: disable=unused-import
from ctsm import unit_testing
-from ctsm.machine import create_machine, get_possibly_overridden_mach_value
+from ctsm.machine import (create_machine, get_possibly_overridden_mach_value,
+ CREATE_TEST_QUEUE_UNSPECIFIED)
from ctsm.machine_utils import get_user
from ctsm.machine_defaults import MACHINE_DEFAULTS, MachineDefaults, QsubDefaults
from ctsm.joblauncher.job_launcher_no_batch import JobLauncherNoBatch
@@ -24,7 +25,8 @@ class TestCreateMachine(unittest.TestCase):
"""Tests of create_machine"""
def assertMachineInfo(self, machine, name, scratch_dir, baseline_dir, account,
- create_test_retry=0):
+ create_test_retry=0,
+ create_test_queue=CREATE_TEST_QUEUE_UNSPECIFIED):
"""Asserts that the basic machine info is as expected.
This does NOT dive down into the job launcher"""
@@ -33,6 +35,7 @@ def assertMachineInfo(self, machine, name, scratch_dir, baseline_dir, account,
self.assertEqual(machine.baseline_dir, baseline_dir)
self.assertEqual(machine.account, account)
self.assertEqual(machine.create_test_retry, create_test_retry)
+ self.assertEqual(machine.create_test_queue, create_test_queue)
def assertNoBatchInfo(self, machine, nice_level=None):
"""Asserts that the machine's launcher is of type JobLauncherNoBatch"""
@@ -65,6 +68,7 @@ def create_defaults(default_job_launcher=JOB_LAUNCHER_QSUB):
baseline_dir=os.path.join(os.path.sep, 'my', 'baselines'),
account_required=True,
create_test_retry=2,
+ create_test_queue="regular",
job_launcher_defaults={
JOB_LAUNCHER_QSUB: QsubDefaults(
queue='regular',
@@ -134,7 +138,8 @@ def test_knownMachine_defaults(self):
get_user()),
baseline_dir=os.path.join(os.path.sep, 'my', 'baselines'),
account='a123',
- create_test_retry=2)
+ create_test_retry=2,
+ create_test_queue="regular")
self.assertQsubInfo(machine=machine,
queue='regular',
walltime='06:00:00',
@@ -157,7 +162,8 @@ def test_knownMachine_argsExplicit(self):
scratch_dir='/custom/path/to/scratch',
baseline_dir=os.path.join(os.path.sep, 'my', 'baselines'),
account='a123',
- create_test_retry=2)
+ create_test_retry=2,
+ create_test_queue="regular")
self.assertQsubInfo(machine=machine,
queue='custom_queue',
walltime='9:87:65',
diff --git a/python/ctsm/test/test_unit_modify_fsurdat.py b/python/ctsm/test/test_unit_modify_fsurdat.py
index 19c53dac6a..a5182ae6e4 100755
--- a/python/ctsm/test/test_unit_modify_fsurdat.py
+++ b/python/ctsm/test/test_unit_modify_fsurdat.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""
-Unit tests for _get_not_rectangle
+Unit tests for _get_rectangle
"""
import unittest
@@ -10,7 +10,7 @@
import xarray as xr
from ctsm import unit_testing
-from ctsm.utils import lon_range_0_to_360
+from ctsm.config_utils import lon_range_0_to_360
from ctsm.modify_fsurdat.modify_fsurdat import ModifyFsurdat
# Allow test names that pylint doesn't like; otherwise hard to make them
@@ -19,9 +19,10 @@
# pylint: disable=protected-access
+
class TestModifyFsurdat(unittest.TestCase):
"""Tests the setvar_lev functions and the
- _get_not_rectangle function
+ _get_rectangle function
"""
def test_setvarLev(self):
@@ -36,7 +37,8 @@ def test_setvarLev(self):
min_lon = 2 # expects min_lon < max_lon
min_lat = 3 # expects min_lat < max_lat
longxy, latixy, cols, rows = self._get_longxy_latixy(
- _min_lon=min_lon, _max_lon=10, _min_lat=min_lat, _max_lat=12)
+ _min_lon=min_lon, _max_lon=10, _min_lat=min_lat, _max_lat=12
+ )
# get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2
lon_1 = 3
@@ -47,39 +49,54 @@ def test_setvarLev(self):
# create xarray dataset containing lev0, lev1, and lev2 variables;
# the fsurdat_modify tool reads variables like this from fsurdat file
var_1d = np.arange(cols)
- var_lev2 = var_1d * np.ones((rows,cols,rows,cols))
- var_lev1 = var_1d * np.ones((cols,rows,cols))
- my_data = xr.Dataset(data_vars=dict(
- LONGXY=(["x", "y"], longxy), # use LONGXY as var_lev0
- LATIXY=(["x", "y"], latixy), # __init__ expects LONGXY, LATIXY
- var_lev1=(["w", "x", "y"], var_lev1),
- var_lev2=(["v", "w", "x", "y"], var_lev2)))
+ var_lev2 = var_1d * np.ones((rows, cols, rows, cols))
+ var_lev1 = var_1d * np.ones((cols, rows, cols))
+ my_data = xr.Dataset(
+ data_vars=dict(
+ LONGXY=(["x", "y"], longxy), # use LONGXY as var_lev0
+ LATIXY=(["x", "y"], latixy), # __init__ expects LONGXY, LATIXY
+ var_lev1=(["w", "x", "y"], var_lev1),
+ var_lev2=(["v", "w", "x", "y"], var_lev2),
+ )
+ )
# create ModifyFsurdat object
- modify_fsurdat = ModifyFsurdat(my_data=my_data, lon_1=lon_1,
- lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, landmask_file=None)
+ modify_fsurdat = ModifyFsurdat(
+ my_data=my_data,
+ lon_1=lon_1,
+ lon_2=lon_2,
+ lat_1=lat_1,
+ lat_2=lat_2,
+ landmask_file=None,
+ )
# initialize and then modify the comparison matrices
comp_lev0 = modify_fsurdat.file.LONGXY
comp_lev1 = modify_fsurdat.file.var_lev1
comp_lev2 = modify_fsurdat.file.var_lev2
val_for_rectangle = 1.5
- comp_lev0[lat_1-min_lat:lat_2-min_lat+1,
- lon_1-min_lon:lon_2-min_lon+1] = val_for_rectangle
- comp_lev1[...,lat_1-min_lat:lat_2-min_lat+1,
- lon_1-min_lon:lon_2-min_lon+1] = val_for_rectangle
- comp_lev2[...,lat_1-min_lat:lat_2-min_lat+1,
- lon_1-min_lon:lon_2-min_lon+1] = val_for_rectangle
+ comp_lev0[
+ lat_1 - min_lat : lat_2 - min_lat + 1, lon_1 - min_lon : lon_2 - min_lon + 1
+ ] = val_for_rectangle
+ comp_lev1[
+ ...,
+ lat_1 - min_lat : lat_2 - min_lat + 1,
+ lon_1 - min_lon : lon_2 - min_lon + 1,
+ ] = val_for_rectangle
+ comp_lev2[
+ ...,
+ lat_1 - min_lat : lat_2 - min_lat + 1,
+ lon_1 - min_lon : lon_2 - min_lon + 1,
+ ] = val_for_rectangle
# test setvar
- modify_fsurdat.setvar_lev0('LONGXY', val_for_rectangle)
+ modify_fsurdat.setvar_lev0("LONGXY", val_for_rectangle)
np.testing.assert_array_equal(modify_fsurdat.file.LONGXY, comp_lev0)
- modify_fsurdat.setvar_lev1('var_lev1', val_for_rectangle, cols-1)
+ modify_fsurdat.setvar_lev1("var_lev1", val_for_rectangle, cols - 1)
np.testing.assert_array_equal(modify_fsurdat.file.var_lev1, comp_lev1)
- modify_fsurdat.setvar_lev2('var_lev2', val_for_rectangle, cols-1,
- rows-1)
+ modify_fsurdat.setvar_lev2("var_lev2", val_for_rectangle, cols - 1, rows - 1)
np.testing.assert_array_equal(modify_fsurdat.file.var_lev2, comp_lev2)
def test_getNotRectangle_lon1leLon2Lat1leLat2(self):
@@ -96,16 +113,18 @@ def test_getNotRectangle_lon1leLon2Lat1leLat2(self):
min_lon = 2 # expects min_lon < max_lon
min_lat = 3 # expects min_lat < max_lat
longxy, latixy, cols, rows = self._get_longxy_latixy(
- _min_lon=min_lon, _max_lon=7, _min_lat=min_lat, _max_lat=8)
+ _min_lon=min_lon, _max_lon=7, _min_lat=min_lat, _max_lat=8
+ )
# get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2
lon_1 = 3
lon_2 = 5 # lon_1 < lon_2
lat_1 = 6
lat_2 = 8 # lat_1 < lat_2
- not_rectangle = ModifyFsurdat._get_not_rectangle(
+ rectangle = ModifyFsurdat._get_rectangle(
lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2,
longxy=longxy, latixy=latixy)
+ not_rectangle = np.logical_not(rectangle)
compare = np.ones((rows,cols))
# assert this to confirm intuitive understanding of these matrices
self.assertEqual(np.size(not_rectangle), np.size(compare))
@@ -113,7 +132,9 @@ def test_getNotRectangle_lon1leLon2Lat1leLat2(self):
# Hardwire where I expect not_rectangle to be False (0)
# I have chosen the lon/lat ranges to match their corresponding index
# values to keep this simple
- compare[lat_1-min_lat:lat_2-min_lat+1, lon_1-min_lon:lon_2-min_lon+1] = 0
+ compare[
+ lat_1 - min_lat : lat_2 - min_lat + 1, lon_1 - min_lon : lon_2 - min_lon + 1
+ ] = 0
np.testing.assert_array_equal(not_rectangle, compare)
def test_getNotRectangle_lon1leLon2Lat1gtLat2(self):
@@ -131,7 +152,8 @@ def test_getNotRectangle_lon1leLon2Lat1gtLat2(self):
min_lon = -3 # expects min_lon < max_lon
min_lat = -2 # expects min_lat < max_lat
longxy, latixy, cols, rows = self._get_longxy_latixy(
- _min_lon=min_lon, _max_lon=6, _min_lat=min_lat, _max_lat=5)
+ _min_lon=min_lon, _max_lon=6, _min_lat=min_lat, _max_lat=5
+ )
# get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2
# I have chosen the lon/lat ranges to match their corresponding index
@@ -140,9 +162,10 @@ def test_getNotRectangle_lon1leLon2Lat1gtLat2(self):
lon_2 = 4 # lon_1 < lon_2
lat_1 = 4
lat_2 = 0 # lat_1 > lat_2
- not_rectangle = ModifyFsurdat._get_not_rectangle(
+ rectangle = ModifyFsurdat._get_rectangle(
lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2,
longxy=longxy, latixy=latixy)
+ not_rectangle = np.logical_not(rectangle)
compare = np.ones((rows,cols))
# assert this to confirm intuitive understanding of these matrices
self.assertEqual(np.size(not_rectangle), np.size(compare))
@@ -150,8 +173,8 @@ def test_getNotRectangle_lon1leLon2Lat1gtLat2(self):
# Hardwire where I expect not_rectangle to be False (0)
# I have chosen the lon/lat ranges to match their corresponding index
# values to keep this simple
- compare[:lat_2-min_lat+1, lon_1-min_lon:lon_2-min_lon+1] = 0
- compare[lat_1-min_lat:, lon_1-min_lon:lon_2-min_lon+1] = 0
+ compare[: lat_2 - min_lat + 1, lon_1 - min_lon : lon_2 - min_lon + 1] = 0
+ compare[lat_1 - min_lat :, lon_1 - min_lon : lon_2 - min_lon + 1] = 0
np.testing.assert_array_equal(not_rectangle, compare)
def test_getNotRectangle_lon1gtLon2Lat1leLat2(self):
@@ -169,7 +192,8 @@ def test_getNotRectangle_lon1gtLon2Lat1leLat2(self):
min_lon = 1 # expects min_lon < max_lon
min_lat = 1 # expects min_lat < max_lat
longxy, latixy, cols, rows = self._get_longxy_latixy(
- _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=90)
+ _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=90
+ )
# get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2
# I have chosen the lon/lat ranges to match their corresponding index
@@ -178,9 +202,10 @@ def test_getNotRectangle_lon1gtLon2Lat1leLat2(self):
lon_2 = 2 # lon_1 > lon_2
lat_1 = 2
lat_2 = 3 # lat_1 < lat_2
- not_rectangle = ModifyFsurdat._get_not_rectangle(
+ rectangle = ModifyFsurdat._get_rectangle(
lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2,
longxy=longxy, latixy=latixy)
+ not_rectangle = np.logical_not(rectangle)
compare = np.ones((rows,cols))
# assert this to confirm intuitive understanding of these matrices
self.assertEqual(np.size(not_rectangle), np.size(compare))
@@ -188,8 +213,8 @@ def test_getNotRectangle_lon1gtLon2Lat1leLat2(self):
# Hardwire where I expect not_rectangle to be False (0)
# I have chosen the lon/lat ranges to match their corresponding index
# values to keep this simple
- compare[lat_1-min_lat:lat_2-min_lat+1, :lon_2-min_lon+1] = 0
- compare[lat_1-min_lat:lat_2-min_lat+1, lon_1-min_lon:] = 0
+ compare[lat_1 - min_lat : lat_2 - min_lat + 1, : lon_2 - min_lon + 1] = 0
+ compare[lat_1 - min_lat : lat_2 - min_lat + 1, lon_1 - min_lon :] = 0
np.testing.assert_array_equal(not_rectangle, compare)
def test_getNotRectangle_lon1gtLon2Lat1gtLat2(self):
@@ -207,7 +232,8 @@ def test_getNotRectangle_lon1gtLon2Lat1gtLat2(self):
min_lon = -8 # expects min_lon < max_lon
min_lat = -9 # expects min_lat < max_lat
longxy, latixy, cols, rows = self._get_longxy_latixy(
- _min_lon=min_lon, _max_lon=5, _min_lat=min_lat, _max_lat=6)
+ _min_lon=min_lon, _max_lon=5, _min_lat=min_lat, _max_lat=6
+ )
# get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2
# I have chosen the lon/lat ranges to match their corresponding index
@@ -216,9 +242,10 @@ def test_getNotRectangle_lon1gtLon2Lat1gtLat2(self):
lon_2 = -6 # lon_1 > lon_2
lat_1 = 0
lat_2 = -3 # lat_1 > lat_2
- not_rectangle = ModifyFsurdat._get_not_rectangle(
+ rectangle = ModifyFsurdat._get_rectangle(
lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2,
longxy=longxy, latixy=latixy)
+ not_rectangle = np.logical_not(rectangle)
compare = np.ones((rows,cols))
# assert this to confirm intuitive understanding of these matrices
self.assertEqual(np.size(not_rectangle), np.size(compare))
@@ -226,10 +253,10 @@ def test_getNotRectangle_lon1gtLon2Lat1gtLat2(self):
# Hardwire where I expect not_rectangle to be False (0)
# I have chosen the lon/lat ranges to match their corresponding index
# values to keep this simple
- compare[:lat_2-min_lat+1, :lon_2-min_lon+1] = 0
- compare[:lat_2-min_lat+1, lon_1-min_lon:] = 0
- compare[lat_1-min_lat:, :lon_2-min_lon+1] = 0
- compare[lat_1-min_lat:, lon_1-min_lon:] = 0
+ compare[: lat_2 - min_lat + 1, : lon_2 - min_lon + 1] = 0
+ compare[: lat_2 - min_lat + 1, lon_1 - min_lon :] = 0
+ compare[lat_1 - min_lat :, : lon_2 - min_lon + 1] = 0
+ compare[lat_1 - min_lat :, lon_1 - min_lon :] = 0
np.testing.assert_array_equal(not_rectangle, compare)
def test_getNotRectangle_lonsStraddle0deg(self):
@@ -247,7 +274,8 @@ def test_getNotRectangle_lonsStraddle0deg(self):
min_lon = 0 # expects min_lon < max_lon
min_lat = -5 # expects min_lat < max_lat
longxy, latixy, cols, rows = self._get_longxy_latixy(
- _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=5)
+ _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=5
+ )
# get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2
# I have chosen the lon/lat ranges to match their corresponding index
@@ -256,9 +284,10 @@ def test_getNotRectangle_lonsStraddle0deg(self):
lon_2 = 5 # lon_1 > lon_2
lat_1 = -4
lat_2 = -6 # lat_1 > lat_2
- not_rectangle = ModifyFsurdat._get_not_rectangle(
+ rectangle = ModifyFsurdat._get_rectangle(
lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2,
longxy=longxy, latixy=latixy)
+ not_rectangle = np.logical_not(rectangle)
compare = np.ones((rows,cols))
# assert this to confirm intuitive understanding of these matrices
self.assertEqual(np.size(not_rectangle), np.size(compare))
@@ -266,10 +295,10 @@ def test_getNotRectangle_lonsStraddle0deg(self):
# Hardwire where I expect not_rectangle to be False (0)
# I have chosen the lon/lat ranges to match their corresponding index
# values to keep this simple
- compare[:lat_2-min_lat+1, :lon_2-min_lon+1] = 0
- compare[:lat_2-min_lat+1, lon_1-min_lon:] = 0
- compare[lat_1-min_lat:, :lon_2-min_lon+1] = 0
- compare[lat_1-min_lat:, lon_1-min_lon:] = 0
+ compare[: lat_2 - min_lat + 1, : lon_2 - min_lon + 1] = 0
+ compare[: lat_2 - min_lat + 1, lon_1 - min_lon :] = 0
+ compare[lat_1 - min_lat :, : lon_2 - min_lon + 1] = 0
+ compare[lat_1 - min_lat :, lon_1 - min_lon :] = 0
np.testing.assert_array_equal(not_rectangle, compare)
def test_getNotRectangle_latsOutOfBounds(self):
@@ -283,7 +312,8 @@ def test_getNotRectangle_latsOutOfBounds(self):
min_lon = 0 # expects min_lon < max_lon
min_lat = -5 # expects min_lat < max_lat
longxy, latixy, _, _ = self._get_longxy_latixy(
- _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=5)
+ _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=5
+ )
# get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2
# I have chosen the lon/lat ranges to match their corresponding index
@@ -294,7 +324,7 @@ def test_getNotRectangle_latsOutOfBounds(self):
lat_2 = 91
with self.assertRaisesRegex(SystemExit,
"lat_1 and lat_2 need to be in the range -90 to 90"):
- _ = ModifyFsurdat._get_not_rectangle(
+ _ = ModifyFsurdat._get_rectangle(
lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2,
longxy=longxy, latixy=latixy)
@@ -307,7 +337,7 @@ def _get_longxy_latixy(self, _min_lon, _max_lon, _min_lat, _max_lat):
long = np.arange(_min_lon, _max_lon + 1)
long = [lon_range_0_to_360(longitude) for longitude in long]
- longxy = long * np.ones((rows,cols))
+ longxy = long * np.ones((rows, cols))
compare = np.repeat([long], rows, axis=0) # alternative way to form
# assert this to confirm intuitive understanding of these matrices
np.testing.assert_array_equal(longxy, compare)
@@ -315,7 +345,7 @@ def _get_longxy_latixy(self, _min_lon, _max_lon, _min_lat, _max_lat):
lati = np.arange(_min_lat, _max_lat + 1)
self.assertEqual(min(lati), _min_lat)
self.assertEqual(max(lati), _max_lat)
- latixy_transp = lati * np.ones((cols,rows))
+ latixy_transp = lati * np.ones((cols, rows))
compare = np.repeat([lati], cols, axis=0) # alternative way to form
# assert this to confirm intuitive understanding of these matrices
np.testing.assert_array_equal(latixy_transp, compare)
@@ -323,6 +353,7 @@ def _get_longxy_latixy(self, _min_lon, _max_lon, _min_lat, _max_lat):
return longxy, latixy, cols, rows
-if __name__ == '__main__':
+
+if __name__ == "__main__":
unit_testing.setup_for_tests()
unittest.main()
diff --git a/python/ctsm/test/test_unit_run_sys_tests.py b/python/ctsm/test/test_unit_run_sys_tests.py
index 8a53081a5b..218001c7f7 100755
--- a/python/ctsm/test/test_unit_run_sys_tests.py
+++ b/python/ctsm/test/test_unit_run_sys_tests.py
@@ -124,6 +124,9 @@ def test_createTestCommand_testnames(self):
assertNotRegex(self, command, r'--compare\s')
assertNotRegex(self, command, r'--generate\s')
assertNotRegex(self, command, r'--baseline-root\s')
+ # In the machine object for this test, create_test_queue will be 'unspecified';
+ # verify that this results in there being no '--queue' argument:
+ assertNotRegex(self, command, r'--queue\s')
expected_cs_status = os.path.join(self._scratch,
self._expected_testroot(),
diff --git a/python/ctsm/test/test_unit_singlept_data.py b/python/ctsm/test/test_unit_singlept_data.py
new file mode 100755
index 0000000000..b924d49762
--- /dev/null
+++ b/python/ctsm/test/test_unit_singlept_data.py
@@ -0,0 +1,339 @@
+#!/usr/bin/env python3
+"""
+Unit tests for SinglePointCase
+
+You can run this by:
+ python -m unittest test_unit_singlept_data.py
+"""
+
+import unittest
+import argparse
+import os
+import sys
+
+# -- add python/ctsm to path (needed if we want to run the test stand-alone)
+_CTSM_PYTHON = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir
+)
+sys.path.insert(1, _CTSM_PYTHON)
+
+# pylint: disable=wrong-import-position
+from ctsm import unit_testing
+from ctsm.site_and_regional.single_point_case import SinglePointCase
+
+# pylint: disable=invalid-name
+
+
+class TestSinglePointCase(unittest.TestCase):
+ """
+ Basic class for testing SinglePointCase class in single_point_case.py.
+ """
+
+ plat = 20.1
+ plon = 50.5
+ site_name = None
+ create_domain = True
+ create_surfdata = True
+ create_landuse = True
+ create_datm = True
+ create_user_mods = True
+ dom_pft = [8]
+ pct_pft = None
+ num_pft = 16
+ include_nonveg = False
+ uni_snow = True
+ cap_saturation = True
+ out_dir = os.getcwd()
+ overwrite = False
+
+ def test_create_tag_noname(self):
+ """
+ Test create_tag when site_name is NOT given.
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+
+ single_point.create_tag()
+ self.assertEqual(single_point.tag, "50.5_20.1")
+
+ def test_create_tag_name(self):
+ """
+ Test create_tag when site_name is given.
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.site_name = "foo"
+ single_point.create_tag()
+ self.assertEqual(single_point.tag, "foo")
+
+ def test_check_dom_pft_too_big(self):
+ """
+ Test check_dom_pft
+        When one of the given dom_pft(s) is bigger than 78
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [16, 36, 79]
+ with self.assertRaisesRegex(
+ argparse.ArgumentTypeError, "values for --dompft should*"
+ ):
+ single_point.check_dom_pft()
+
+ def test_check_dom_pft_too_small(self):
+ """
+ Test check_dom_pft
+        When one of the given dom_pft(s) is smaller than 1
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [16, 36, -1]
+ with self.assertRaisesRegex(
+ argparse.ArgumentTypeError, "values for --dompft should*"
+ ):
+ single_point.check_dom_pft()
+
+ def test_check_dom_pft_numpft(self):
+ """
+ Test check_dom_pft
+        When a dom_pft > 15 is given but crop is off (num_pft = 16)
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [15, 53]
+ single_point.num_pft = 16
+ with self.assertRaisesRegex(argparse.ArgumentTypeError, "Please use --crop*"):
+ single_point.check_dom_pft()
+
+ def test_check_dom_pft_mixed_range(self):
+ """
+ Test check_dom_pft
+        Test that all dom_pft(s) fall in the same range, either 1-15 or 16-78
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [1, 5, 15]
+ single_point.num_pft = 78
+ with self.assertRaisesRegex(
+ argparse.ArgumentTypeError, "mixed land units is not possible*"
+ ):
+ single_point.check_dom_pft()
+
+ def test_check_nonveg_nodompft(self):
+ """
+ Test check_nonveg
+        If include_nonveg is False and no dom_pft is given, it should complain.
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = None
+ single_point.include_nonveg = False
+ with self.assertRaisesRegex(
+ argparse.ArgumentTypeError,
+ "To include non-veg land units, you need to specify*",
+ ):
+ single_point.check_nonveg()
+
+ def test_check_pct_pft_notsamenumbers(self):
+ """
+ Test check_pct_pft
+ Check if pct_pft is the same length as dom_pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [1, 5]
+ single_point.pct_pft = [0.5]
+ with self.assertRaisesRegex(
+ argparse.ArgumentTypeError, "Please provide the same number of inputs*"
+ ):
+ single_point.check_pct_pft()
+
+ def test_check_pct_pft_sum_not1(self):
+ """
+ Test check_pct_pft
+ Check if pct_pft adds up to 1 or 100.
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [1, 5]
+ single_point.pct_pft = [0.1, 0.5]
+ with self.assertRaisesRegex(
+ argparse.ArgumentTypeError, "Sum of --pctpft values should be equal to 1*"
+ ):
+ single_point.check_pct_pft()
+
+ def test_check_pct_pft_fraction_topct(self):
+ """
+ Test check_pct_pft
+        Check that pct_pft is correctly converted to percent.
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [1, 5, 8]
+ single_point.pct_pft = [0.5, 0.4, 0.1]
+ single_point.check_pct_pft()
+ self.assertEqual(single_point.pct_pft, [50, 40, 10])
+
+
+if __name__ == "__main__":
+ unit_testing.setup_for_tests()
+ unittest.main()
diff --git a/python/ctsm/test/test_unit_singlept_data_surfdata.py b/python/ctsm/test/test_unit_singlept_data_surfdata.py
new file mode 100755
index 0000000000..c2392e766e
--- /dev/null
+++ b/python/ctsm/test/test_unit_singlept_data_surfdata.py
@@ -0,0 +1,1000 @@
+#!/usr/bin/env python3
+"""
+Unit tests for creating and modifying surface datasets in SinglePointCase
+
+for the rest of SinglePointCase tests please see : test_unit_singlept_data
+
+You can run this by:
+ python -m unittest test_unit_singlept_data_surfdata.py
+"""
+
+import unittest
+import os
+import sys
+
+
+import numpy as np
+import xarray as xr
+
+# -- add python/ctsm to path (needed if we want to run the test stand-alone)
+_CTSM_PYTHON = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir
+)
+sys.path.insert(1, _CTSM_PYTHON)
+
+# pylint: disable=wrong-import-position
+from ctsm import unit_testing
+from ctsm.site_and_regional.single_point_case import SinglePointCase
+
+# pylint: disable=invalid-name
+
+
+class TestSinglePointCaseSurfaceNoCrop(unittest.TestCase):
+ """
+ Basic class for testing creating and modifying surface dataset for
+ non-crop cases (aka using 16 pft dataset) in SinglePointCase class in single_point_case.py.
+
+ """
+
+ plat = 20.1
+ plon = 50.5
+ site_name = None
+ create_domain = True
+ create_surfdata = True
+ create_landuse = True
+ create_datm = True
+ create_user_mods = True
+ dom_pft = [8]
+ pct_pft = None
+ num_pft = 16
+ include_nonveg = False
+ uni_snow = True
+ cap_saturation = True
+ out_dir = os.getcwd()
+ overwrite = False
+
+ # -- dimensions of xarray dataset
+ lsmlat = [plat]
+ lsmlon = [plon]
+ natpft = np.arange(0, 15, 1, dtype=int)
+ cft = np.arange(15, 17, 1, dtype=int)
+ numurbl = np.arange(0, 3, 1, dtype=int)
+
+ ds_test = xr.Dataset(
+ {
+ "PCT_NATVEG": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={
+ "long_name": "total percent natural vegetation landunit",
+ "units": "unitless",
+ },
+ ),
+ "PCT_CROP": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={"long_name": "total percent crop landunit", "units": "unitless"},
+ ),
+ "PCT_NAT_PFT": xr.DataArray(
+ data=np.random.rand(1, 1, 15),
+ dims=["lsmlat", "lsmlon", "natpft"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "natpft": natpft},
+ attrs={
+ "long_name": "percent plant functional type on the natural veg landunit",
+ "units": "unitless",
+ },
+ ),
+ "PCT_CFT": xr.DataArray(
+ data=np.random.rand(1, 1, 2),
+ dims=["lsmlat", "lsmlon", "cft"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "cft": cft},
+ attrs={
+ "long_name": "percent crop functional type on the crop landunit",
+ "units": "unitless",
+ },
+ ),
+ "PCT_LAKE": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={"long_name": "percent lake", "units": "unitless"},
+ ),
+ "PCT_WETLAND": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={"long_name": "percent wetland", "units": "unitless"},
+ ),
+ "PCT_URBAN": xr.DataArray(
+ data=np.random.rand(1, 1, 3),
+ dims=["lsmlat", "lsmlon", "numurbl"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "numurbl": numurbl},
+ attrs={
+ "long_name": "percent urban for each density type",
+ "units": "unitless",
+ },
+ ),
+ "PCT_GLACIER": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={"long_name": "percent glacier", "units": "unitless"},
+ ),
+ "STD_ELEV": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={"long_name": "standard deviation of elevation", "units": "m"},
+ ),
+ "FMAX": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={
+ "long_name": "maximum fractional saturated area",
+ "units": "unitless",
+ },
+ ),
+ },
+ attrs={"Conventions": "test data only"},
+ )
+
+ def test_modify_surfdata_atpoint_nocrop_1pft_pctnatpft(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_NAT_PFT for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [5]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ expected_out = np.zeros((1, 1, 15))
+ expected_out[:, :, 5] = 100
+
+ np.testing.assert_array_equal(ds_out["PCT_NAT_PFT"].data, expected_out)
+
+ def test_modify_surfdata_atpoint_nocrop_1pft_pctnatveg(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_NATVEG for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [5]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["PCT_NATVEG"].data[:, :], 100)
+
+ def test_modify_surfdata_atpoint_nocrop_1pft_pctcrop(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_CROP for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [5]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["PCT_CROP"].data[:, :], 0)
+
+ def test_modify_surfdata_atpoint_nocrop_1pft_glacier(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_GLACIER for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.include_nonveg = False
+ single_point.dom_pft = [5]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["PCT_GLACIER"].data[:, :], 0)
+
+ def test_modify_surfdata_atpoint_nocrop_1pft_wetland(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_WETLAND for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [5]
+
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["PCT_WETLAND"].data[:, :], 0)
+
+ def test_modify_surfdata_atpoint_nocrop_1pft_lake(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_LAKE for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [5]
+
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["PCT_LAKE"].data[:, :], 0)
+
+ def test_modify_surfdata_atpoint_nocrop_1pft_unisnow(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks STD_ELEV for one pft and unisnow
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.include_nonveg = False
+ single_point.dom_pft = [5]
+ single_point.uni_snow = True
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["STD_ELEV"].data[:, :], 20)
+
+ def test_modify_surfdata_atpoint_nocrop_1pft_capsat(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks FMAX for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.include_nonveg = False
+ single_point.dom_pft = [5]
+ single_point.cap_saturation = True
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["FMAX"].data[:, :], 0)
+
+ def test_modify_surfdata_atpoint_nocrop_multipft(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_NAT_PFT for multi pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.include_nonveg = False
+ single_point.dom_pft = [1, 3, 5]
+ single_point.pct_pft = [0.5, 0.4, 0.1]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ expected_out = np.zeros((1, 1, 15))
+ expected_out[:, :, 1] = 0.5
+ expected_out[:, :, 3] = 0.4
+ expected_out[:, :, 5] = 0.1
+
+ np.testing.assert_array_equal(ds_out["PCT_NAT_PFT"].data, expected_out)
+
+ def test_modify_surfdata_atpoint_nocrop_urban_nononveg(self):
+ """
+ Test modify_surfdata_atpoint for non-crop cases
+ Checks PCT_URBAN for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.include_nonveg = False
+ single_point.dom_pft = [7]
+ single_point.plat = [34.05]
+ single_point.plon = [118.25]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ expected_out = np.zeros((1, 1, 3))
+
+ np.testing.assert_array_equal(ds_out["PCT_URBAN"].data, expected_out)
+
+ def test_modify_surfdata_atpoint_nocrop_urban_include_nonveg(self):
+ """
+ Test modify_surfdata_atpoint for non-crop cases
+ Checks PCT_URBAN for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.include_nonveg = True
+ single_point.dom_pft = [7]
+ single_point.plat = [34.05]
+ single_point.plon = [118.25]
+
+ # -- change it to something known
+ self.ds_test["PCT_URBAN"][:, :, :] = 1
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ expected_out = np.ones((1, 1, 3))
+
+ np.testing.assert_array_equal(ds_out["PCT_URBAN"].data, expected_out)
+
+ def test_modify_surfdata_atpoint_nocrop_wetland_include_nonveg(self):
+ """
+ Test modify_surfdata_atpoint for non-crop cases
+ Checks PCT_WETLAND for one pft to make sure it is not zeroed out
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.include_nonveg = True
+ single_point.dom_pft = [7]
+
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertNotEqual(ds_out["PCT_WETLAND"].data[:, :], 0)
+
+
+class TestSinglePointCaseSurfaceCrop(unittest.TestCase):
+ """
+ Basic class for testing the creation and modification of the surface dataset
+ for crop cases (i.e., using the 78-pft dataset) in the SinglePointCase
+ class in single_point_case.py.
+ """
+
+ plat = 20.1
+ plon = 50.5
+ site_name = None
+ create_domain = True
+ create_surfdata = True
+ create_landuse = True
+ create_datm = True
+ create_user_mods = True
+ dom_pft = [17]
+ pct_pft = None
+ num_pft = 78
+ include_nonveg = False
+ uni_snow = False
+ cap_saturation = False
+ out_dir = os.getcwd()
+ overwrite = False
+
+ # -- dimensions of xarray dataset
+ lsmlat = [plat]
+ lsmlon = [plon]
+ natpft = np.arange(0, 15, 1, dtype=int)
+ cft = np.arange(15, 79, 1, dtype=int)
+ numurbl = np.arange(0, 3, 1, dtype=int)
+
+ ds_test = xr.Dataset(
+ {
+ "PCT_NATVEG": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={
+ "long_name": "total percent natural vegetation landunit",
+ "units": "unitless",
+ },
+ ),
+ "PCT_CROP": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={"long_name": "total percent crop landunit", "units": "unitless"},
+ ),
+ "PCT_NAT_PFT": xr.DataArray(
+ data=np.random.rand(1, 1, 15),
+ dims=["lsmlat", "lsmlon", "natpft"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "natpft": natpft},
+ attrs={
+ "long_name": "percent plant functional type on the natural veg landunit",
+ "units": "unitless",
+ },
+ ),
+ "PCT_CFT": xr.DataArray(
+ data=np.random.rand(1, 1, 64),
+ dims=["lsmlat", "lsmlon", "cft"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "cft": cft},
+ attrs={
+ "long_name": "percent crop functional type on the crop landunit",
+ "units": "unitless",
+ },
+ ),
+ "PCT_LAKE": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={"long_name": "percent lake", "units": "unitless"},
+ ),
+ "PCT_WETLAND": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={"long_name": "percent wetland", "units": "unitless"},
+ ),
+ "PCT_URBAN": xr.DataArray(
+ data=np.random.rand(1, 1, 3),
+ dims=["lsmlat", "lsmlon", "numurbl"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "numurbl": numurbl},
+ attrs={
+ "long_name": "percent urban for each density type",
+ "units": "unitless",
+ },
+ ),
+ "PCT_GLACIER": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={"long_name": "percent glacier", "units": "unitless"},
+ ),
+ "STD_ELEV": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={"long_name": "standard deviation of elevation", "units": "m"},
+ ),
+ "FMAX": xr.DataArray(
+ data=np.random.rand(1, 1),
+ dims=["lsmlat", "lsmlon"],
+ coords={"lsmlat": lsmlat, "lsmlon": lsmlon},
+ attrs={
+ "long_name": "maximum fractional saturated area",
+ "units": "unitless",
+ },
+ ),
+ },
+ attrs={"Conventions": "test data only"},
+ )
+
+ def test_modify_surfdata_atpoint_crop_1pft_pctnatpft(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_NAT_PFT for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [19]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ expected_out = np.zeros((1, 1, 64))
+ expected_out[:, :, 4] = 100
+
+ np.testing.assert_array_equal(ds_out["PCT_CFT"].data, expected_out)
+
+ def test_modify_surfdata_atpoint_crop_1pft_pctnatveg(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_NATVEG for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [17]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["PCT_NATVEG"].data[:, :], 0)
+
+ def test_modify_surfdata_atpoint_crop_1pft_pctcrop(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_CROP for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [17]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["PCT_CROP"].data[:, :], 100)
+
+ def test_modify_surfdata_atpoint_crop_1pft_glacier(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_GLACIER for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [17]
+
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["PCT_GLACIER"].data[:, :], 0)
+
+ def test_modify_surfdata_atpoint_crop_1pft_wetland(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_WETLAND for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [17]
+
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["PCT_WETLAND"].data[:, :], 0)
+
+ def test_modify_surfdata_atpoint_crop_1pft_lake(self):
+ """
+ Test modify_surfdata_atpoint
+ Checks PCT_LAKE for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [17]
+
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["PCT_LAKE"].data[:, :], 0)
+
+ def test_modify_surfdata_atpoint_crop_1pft_unisnow(self):
+ """
+ Test modify_surfdata_atpoint for crop cases
+ Checks STD_ELEV for one pft and unisnow
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [17]
+ single_point.uni_snow = True
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["STD_ELEV"].data[:, :], 20)
+
+ def test_modify_surfdata_atpoint_crop_1pft_capsat(self):
+ """
+ Test modify_surfdata_atpoint for crop cases
+ Checks FMAX for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.cap_saturation = True
+ single_point.dom_pft = [22]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertEqual(ds_out["FMAX"].data[:, :], 0)
+
+ def test_modify_surfdata_atpoint_crop_multipft(self):
+ """
+ Test modify_surfdata_atpoint for crop cases
+ Checks PCT_NAT_PFT for multi pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.dom_pft = [17, 22]
+ single_point.pct_pft = [0.6, 0.4]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ expected_out = np.zeros((1, 1, 64))
+ expected_out[:, :, 2] = 0.6
+ expected_out[:, :, 7] = 0.4
+
+ np.testing.assert_array_equal(ds_out["PCT_CFT"].data, expected_out)
+
+ def test_modify_surfdata_atpoint_crop_urban_nononveg(self):
+ """
+ Test modify_surfdata_atpoint for crop cases
+ Checks URBAN for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.include_nonveg = False
+ single_point.dom_pft = [17]
+ single_point.plat = [34.05]
+ single_point.plon = [118.25]
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ expected_out = np.zeros((1, 1, 3))
+
+ np.testing.assert_array_equal(ds_out["PCT_URBAN"].data, expected_out)
+
+ def test_modify_surfdata_atpoint_crop_urban_include_nonveg(self):
+ """
+ Test modify_surfdata_atpoint for crop cases
+ Checks URBAN for one pft
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.include_nonveg = True
+ single_point.dom_pft = [17]
+ single_point.plat = [34.05]
+ single_point.plon = [118.25]
+
+ # -- change it to something known
+ self.ds_test["PCT_URBAN"][:, :, :] = 1
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ expected_out = np.ones((1, 1, 3))
+
+ np.testing.assert_array_equal(ds_out["PCT_URBAN"].data, expected_out)
+
+ def test_modify_surfdata_atpoint_crop_lake_include_nonveg(self):
+ """
+ Test modify_surfdata_atpoint for crop cases
+ Checks PCT_LAKE for one pft to make sure it is not zeroed out
+ """
+ single_point = SinglePointCase(
+ plat=self.plat,
+ plon=self.plon,
+ site_name=self.site_name,
+ create_domain=self.create_domain,
+ create_surfdata=self.create_surfdata,
+ create_landuse=self.create_landuse,
+ create_datm=self.create_datm,
+ create_user_mods=self.create_user_mods,
+ dom_pft=self.dom_pft,
+ pct_pft=self.pct_pft,
+ num_pft=self.num_pft,
+ include_nonveg=self.include_nonveg,
+ uni_snow=self.uni_snow,
+ cap_saturation=self.cap_saturation,
+ out_dir=self.out_dir,
+ overwrite=self.overwrite,
+ )
+ single_point.include_nonveg = True
+ single_point.dom_pft = [17]
+
+ ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+ self.assertNotEqual(ds_out["PCT_LAKE"].data[:, :], 0)
+
+
+if __name__ == "__main__":
+ unit_testing.setup_for_tests()
+ unittest.main()
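Because the module above inserts python/ctsm onto sys.path and calls unit_testing.setup_for_tests(), the suite can run stand-alone. As a sketch, the same tests can also be driven programmatically; the module name here is assumed from the surrounding diffs, not confirmed in this hunk:

    # Run the suite above without executing the file directly (sketch).
    # Assumes the CTSM python/ directory is on sys.path.
    import unittest

    suite = unittest.TestLoader().loadTestsFromName(
        "ctsm.test.test_unit_single_point_case"  # assumed module name
    )
    unittest.TextTestRunner(verbosity=2).run(suite)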
diff --git a/python/ctsm/test/test_unit_utils.py b/python/ctsm/test/test_unit_utils.py
index ead0d8ce5a..cad2a7d1af 100755
--- a/python/ctsm/test/test_unit_utils.py
+++ b/python/ctsm/test/test_unit_utils.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
-"""Unit tests for utils
+"""Unit tests for utils and config_utils
"""
import tempfile
@@ -9,8 +9,8 @@
import os
from ctsm import unit_testing
-from ctsm.utils import (fill_template_file, lon_range_0_to_360,
- _handle_config_value)
+from ctsm.utils import fill_template_file
+from ctsm.config_utils import lon_range_0_to_360, _handle_config_value
# Allow names that pylint doesn't like, because otherwise I find it hard
# to make readable unit test names
diff --git a/python/ctsm/test/testinputs/surfdata_5x5_amazon_16pfts_Irrig_CMIP6_simyr2000_c171214_modified_with_crop.nc b/python/ctsm/test/testinputs/surfdata_5x5_amazon_16pfts_Irrig_CMIP6_simyr2000_c171214_modified_with_crop.nc
new file mode 100644
index 0000000000..69f28b2239
--- /dev/null
+++ b/python/ctsm/test/testinputs/surfdata_5x5_amazon_16pfts_Irrig_CMIP6_simyr2000_c171214_modified_with_crop.nc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0217926e5dea2f563a01ad7149be68cf6d0acb0a140715a5402fdf39a925b3e7
+size 247880
diff --git a/python/ctsm/toolchain/ctsm_case.py b/python/ctsm/toolchain/ctsm_case.py
index 86a6861efb..fe077ef3aa 100755
--- a/python/ctsm/toolchain/ctsm_case.py
+++ b/python/ctsm/toolchain/ctsm_case.py
@@ -13,7 +13,7 @@
from datetime import datetime
-from ctsm.git_utils import tag_describe
+from ctsm.git_utils import get_ctsm_git_describe
# -- import local classes for this script
logger = logging.getLogger(__name__)
@@ -267,7 +267,7 @@ def create_namelist_file(self):
self.build_namelist_filename()
with open(self.namelist_fname, "w", encoding='utf-8') as namelist_file:
- label = tag_describe()
+ label = get_ctsm_git_describe()
dst_mesh = which_mesh(self.res)
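The rename from tag_describe to get_ctsm_git_describe leaves the call site otherwise untouched; the namelist label still comes from a single call. A minimal sketch of the updated usage, assuming a CTSM checkout so the import resolves:

    # Sketch of the renamed helper as used in create_namelist_file above.
    from ctsm.git_utils import get_ctsm_git_describe

    label = get_ctsm_git_describe()  # describe string recorded in the namelist header
    print(label)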
diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py
index 4f0460796c..70bef1d951 100644
--- a/python/ctsm/utils.py
+++ b/python/ctsm/utils.py
@@ -5,22 +5,12 @@
import sys
import string
import pdb
-import subprocess
from datetime import date
from getpass import getuser
-from configparser import NoSectionError, NoOptionError
-from ctsm.path_utils import path_to_ctsm_root
logger = logging.getLogger(__name__)
-# This string is used in the out-of-the-box ctsm.cfg and modify.cfg files
-# to denote a value that needs to be filled in
-_CONFIG_PLACEHOLDER = 'FILL_THIS_IN'
-# This string is used in the out-of-the-box ctsm.cfg and modify.cfg files
-# to denote a value that can be filled in, but doesn't absolutely need to be
-_CONFIG_UNSET = 'UNSET'
-
def abort(errmsg):
"""Abort the program with the given error message
@@ -29,7 +19,8 @@ def abort(errmsg):
if logger.isEnabledFor(logging.DEBUG):
pdb.set_trace()
- sys.exit('ERROR: {}'.format(errmsg))
+ sys.exit("ERROR: {}".format(errmsg))
+
def fill_template_file(path_to_template, path_to_final, substitutions):
"""Given a template file (based on python's template strings), write a copy of the
@@ -45,43 +36,42 @@ def fill_template_file(path_to_template, path_to_final, substitutions):
template_file_contents = template_file.read()
template = string.Template(template_file_contents)
final_file_contents = template.substitute(substitutions)
- with open(path_to_final, 'w') as final_file:
+ with open(path_to_final, "w") as final_file:
final_file.write(final_file_contents)
-def get_git_sha():
- """
- Returns Git short SHA for the currect directory.
- """
- return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode()
-
-def get_ctsm_git_sha():
- """
- Returns Git short SHA for the ctsm directory.
- """
- return subprocess.check_output(['git', '-C', path_to_ctsm_root(),
- 'rev-parse', '--short', 'HEAD']).strip().decode()
-
def add_tag_to_filename(filename, tag):
"""
Add a tag and replace timetag of a filename
Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc
Add the tag to just before that ending part
- and change the ending part to the current time tag
- """
+ and change the ending part to the current time tag.
+
+ Parameters
+ ----------
+ filename (str) : file name
+ tag (str) : string of a tag to be added to the end of filename
+
+ Raises
+ ------
+ Error: when the expected "." or "_" separator cannot be found in the filename.
+
+ Returns
+ -------
+ fname_out (str): filename with the tag and date string added
+ """
basename = os.path.basename(filename)
cend = -10
-
if basename[cend] == "c":
cend = cend - 1
- if ( (basename[cend] != ".") and (basename[cend] != "_") ):
- errmsg = 'Trouble figuring out where to add tag to filename: ' + filename
- abort(errmsg)
-
+ if (basename[cend] != ".") and (basename[cend] != "_"):
+ err_msg = "Trouble figuring out where to add tag to filename: " + filename
+ abort(err_msg)
today = date.today()
today_string = today.strftime("%y%m%d")
+ fname_out = basename[:cend] + "_" + tag + "_c" + today_string + ".nc"
+ return fname_out
- return basename[:cend] + "_" + tag + "_c" + today_string + '.nc'
def update_metadata(file, title, summary, contact, data_script, description):
"""
@@ -103,129 +93,33 @@ def update_metadata(file, title, summary, contact, data_script, description):
would be good (sys.argv) here or in data_script.
"""
- #update attributes
+ # update attributes
today = date.today()
today_string = today.strftime("%Y-%m-%d")
# This is the required metadata for inputdata files
- file.attrs['title'] = title
- file.attrs['summary'] = summary
- file.attrs['creator'] = getuser()
- file.attrs['contact'] = contact
- file.attrs['creation_date'] = today_string
- file.attrs['data_script'] = data_script
- file.attrs['description'] = description
-
- #delete unrelated attributes if they exist
- del_attrs = ['source_code', 'SVN_url', 'hostname', 'history'
- 'History_Log', 'Logname', 'Host', 'Version',
- 'Compiler_Optimized']
+ file.attrs["title"] = title
+ file.attrs["summary"] = summary
+ file.attrs["creator"] = getuser()
+ file.attrs["contact"] = contact
+ file.attrs["creation_date"] = today_string
+ file.attrs["data_script"] = data_script
+ file.attrs["description"] = description
+
+ # delete unrelated attributes if they exist
+ del_attrs = [
+ "source_code",
+ "SVN_url",
+ "hostname",
+ "history",
+ "History_Log",
+ "Logname",
+ "Host",
+ "Version",
+ "Compiler_Optimized",
+ ]
attr_list = file.attrs
for attr in del_attrs:
if attr in attr_list:
del file.attrs[attr]
-
-def lon_range_0_to_360(lon_in):
- """
- Description
- -----------
- Restrict longitude to 0 to 360 when given as -180 to 180.
- """
- if -180 <= lon_in < 0:
- lon_out = lon_in + 360
- logger.info('Resetting longitude from %s to %s to keep in the range ' \
- ' 0 to 360', str(lon_in), str(lon_out))
- elif 0 <= lon_in <= 360 or lon_in is None:
- lon_out = lon_in
- else:
- errmsg = 'lon_in needs to be in the range 0 to 360'
- abort(errmsg)
-
- return lon_out
-
-def get_config_value(config, section, item, file_path, allowed_values=None,
- default=None, is_list=False, convert_to_type=None,
- can_be_unset=False):
- """Get a given item from a given section of the config object
- Give a helpful error message if we can't find the given section or item
- Note that the file_path argument is only used for the sake of the error message
- If allowed_values is present, it should be a list of strings giving allowed values
- The function _handle_config_value determines what to do if we read:
- - a list or
- - a str that needs to be converted to int / float / bool
- - _CONFIG_UNSET: anything with the value "UNSET" will become "None"
- """
- try:
- val = config.get(section, item)
- except NoSectionError:
- abort("ERROR: Config file {} must contain section '{}'".format(file_path, section))
- except NoOptionError:
- abort("ERROR: Config file {} must contain item '{}' in section '{}'".format(
- file_path, item, section))
-
- if val == _CONFIG_PLACEHOLDER:
- abort("Error: {} needs to be specified in config file {}".format(item, file_path))
-
- val = _handle_config_value(var=val, default=default, item=item,
- is_list=is_list, convert_to_type=convert_to_type,
- can_be_unset=can_be_unset, allowed_values=allowed_values)
-
- return val
-
-def _handle_config_value(var, default, item, is_list, convert_to_type,
- can_be_unset, allowed_values):
- """
- Description
- -----------
- Assign the default value or the user-specified one to var.
- Convert from default type (str) to reqested type (int or float).
-
- If is_list is True, then default should be a list
- """
- if var == _CONFIG_UNSET:
- if can_be_unset:
- return default # default may be None
- abort('Must set a value for .cfg file variable: {}'.format(item))
-
- # convert string to list of strings; if there is just one element,
- # we will get a list of size one, which we will convert back to a
- # scalar later if needed
- var = var.split()
-
- if convert_to_type is bool:
- try:
- var = [_convert_to_bool(v) for v in var]
- except ValueError:
- abort('Non-boolean value found for .cfg file variable: {}'.format(item))
- elif convert_to_type is not None:
- try:
- var = [convert_to_type(v) for v in var]
- except ValueError:
- abort('Wrong type for .cfg file variable: {}'.format(item))
-
- if allowed_values is not None:
- for val in var:
- if val not in allowed_values:
- print('val = ', val, ' in var not in allowed_values')
- errmsg = '{} is not an allowed value for {} in .cfg file. ' \
- 'Check allowed_values'.format(val, item)
- abort(errmsg)
-
- if not is_list:
- if len(var) > 1:
- abort('More than 1 element found for .cfg file variable: {}'.format(item))
- var = var[0]
-
- return var
-
-def _convert_to_bool(val):
- """Convert the given value to boolean
-
- Conversion is as in config files 'getboolean'
- """
- if val.lower() in ['1', 'yes', 'true', 'on']:
- return True
- if val.lower() in ['0', 'no', 'false', 'off']:
- return False
- raise ValueError("{} cannot be converted to boolean".format(val))
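With this refactor, the config-file helpers (lon_range_0_to_360, _handle_config_value, and friends) move out of ctsm.utils into ctsm.config_utils, matching the import change in test_unit_utils.py above, while generic helpers such as fill_template_file and add_tag_to_filename stay behind. A minimal sketch of the new import surface, with the longitude behavior taken from the code removed above:

    # Sketch: imports after the utils/config_utils split (assumes a CTSM checkout).
    from ctsm.utils import fill_template_file, add_tag_to_filename
    from ctsm.config_utils import lon_range_0_to_360

    print(lon_range_0_to_360(-75.0))  # 285.0: values in [-180, 0) are shifted by +360
    print(lon_range_0_to_360(200.0))  # 200.0: values already in [0, 360] pass through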
diff --git a/src/biogeochem/CNC14DecayMod.F90 b/src/biogeochem/CNC14DecayMod.F90
index 2fdee93d8f..d929b80c33 100644
--- a/src/biogeochem/CNC14DecayMod.F90
+++ b/src/biogeochem/CNC14DecayMod.F90
@@ -5,7 +5,7 @@ module CNC14DecayMod
!
! !USES:
use shr_kind_mod , only : r8 => shr_kind_r8
- use clm_time_manager , only : get_step_size_real, get_curr_days_per_year
+ use clm_time_manager , only : get_step_size_real, get_average_days_per_year
use clm_varpar , only : nlevdecomp, ndecomp_pools
use clm_varcon , only : secspday
use clm_varctl , only : spinup_state
@@ -87,7 +87,7 @@ subroutine C14Decay( bounds, num_soilc, filter_soilc, num_soilp, filter_soilp, &
! set time steps
dt = get_step_size_real()
- days_per_year = get_curr_days_per_year()
+ days_per_year = get_average_days_per_year()
half_life = 5730._r8 * secspday * days_per_year
decay_const = - log(0.5_r8) / half_life
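Swapping get_curr_days_per_year for get_average_days_per_year builds the 14C decay constant from a fixed average year length rather than the current year's, so it no longer shifts in leap years. The arithmetic, restated as a sketch (the average-days value is an assumption; the model obtains it from clm_time_manager):

    import math

    secspday = 86400.0
    avg_days_per_year = 365.25                         # assumed; supplied by clm_time_manager
    half_life = 5730.0 * secspday * avg_days_per_year  # 14C half-life in seconds
    decay_const = -math.log(0.5) / half_life           # ~3.8e-12 1/s
    print(decay_const)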
diff --git a/src/biogeochem/CNFireEmissionsMod.F90 b/src/biogeochem/CNFireEmissionsMod.F90
index d3344baaaa..645f074a7d 100644
--- a/src/biogeochem/CNFireEmissionsMod.F90
+++ b/src/biogeochem/CNFireEmissionsMod.F90
@@ -3,18 +3,18 @@ module CNFireEmissionsMod
!-----------------------------------------------------------------------
! !DESCRIPTION:
! Gathers carbon emissions from fire sources to be sent to CAM-Chem via
- ! the coupler ....
+ ! the coupler ....
! Created by F. Vitt, and revised by F. Li
! !USES:
use shr_kind_mod, only : r8 => shr_kind_r8
use abortutils, only : endrun
- use PatchType, only : patch
+ use PatchType, only : patch
use decompMod, only : bounds_type
use shr_fire_emis_mod, only : shr_fire_emis_comps_n, shr_fire_emis_comp_t, shr_fire_emis_linkedlist
use shr_fire_emis_mod, only : shr_fire_emis_mechcomps_n, shr_fire_emis_mechcomps
!
implicit none
- private
+ private
!
! !PUBLIC MEMBER FUNCTIONS:
public :: CNFireEmisUpdate
@@ -71,7 +71,7 @@ subroutine Init(this, bounds)
emis_cmp => emis_cmp%next_emiscomp
enddo
- call this%InitAllocate(bounds)
+ call this%InitAllocate(bounds)
call this%InitHistory(bounds)
end subroutine Init
@@ -85,7 +85,7 @@ subroutine InitAllocate(this, bounds)
! !ARGUMENTS:
class(fireemis_type) :: this
- type(bounds_type), intent(in) :: bounds
+ type(bounds_type), intent(in) :: bounds
!
! !LOCAL VARIABLES:
integer :: beg, end, i
@@ -128,12 +128,13 @@ subroutine InitHistory(this, bounds)
! !ARGUMENTS:
class(fireemis_type) :: this
- type(bounds_type), intent(in) :: bounds
+ type(bounds_type), intent(in) :: bounds
! !LOCAL VARIABLES
integer :: begp, endp
integer :: imech, icomp
type(shr_fire_emis_comp_t), pointer :: emis_cmp
+ character(len=16) :: units
if (shr_fire_emis_mechcomps_n>0) then
@@ -143,8 +144,13 @@ subroutine InitHistory(this, bounds)
emis_cmp_loop: do while(associated(emis_cmp))
icomp = emis_cmp%index
+ if (emis_cmp%name(1:4) == 'num_') then
+ units = 'molecules/m2/sec'
+ else
+ units = 'kg/m2/sec'
+ endif
- call hist_addfld1d (fname='FireComp_'//trim(emis_cmp%name), units='kg/m2/sec', &
+ call hist_addfld1d (fname='FireComp_'//trim(emis_cmp%name), units=units, &
avgflag='A', long_name='fire emissions flux of '//trim(emis_cmp%name), &
ptr_patch=this%comp(icomp)%emis, default='inactive')
@@ -154,8 +160,13 @@ subroutine InitHistory(this, bounds)
! loop over atm chem mechanism species
do imech = 1,shr_fire_emis_mechcomps_n
+ if (shr_fire_emis_mechcomps(imech)%name(1:4) == 'num_') then
+ units = 'molecules/m2/sec'
+ else
+ units = 'kg/m2/sec'
+ endif
- call hist_addfld1d (fname='FireMech_'//trim(shr_fire_emis_mechcomps(imech)%name), units='kg/m2/sec', &
+ call hist_addfld1d (fname='FireMech_'//trim(shr_fire_emis_mechcomps(imech)%name), units=units, &
avgflag='A', long_name='fire emissions flux of '//trim(shr_fire_emis_mechcomps(imech)%name), &
ptr_patch=this%mech(imech)%emis, default='inactive')
@@ -170,29 +181,29 @@ subroutine InitHistory(this, bounds)
ptr_patch=this%ztop_patch, default='inactive')
endif
-
+
end subroutine InitHistory
!-----------------------------------------------------------------------
subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnveg_cs_inst, fireemis_inst )
use CNVegcarbonfluxType, only : cnveg_carbonflux_type
- use CNVegCarbonStateType, only : cnveg_carbonstate_type
+ use CNVegCarbonStateType, only : cnveg_carbonstate_type
use clm_varpar, only : ndecomp_pools, nlevdecomp
use clm_varcon, only : dzsoi_decomp
!ARGUMENTS:
- type(bounds_type), intent(in) :: bounds
+ type(bounds_type), intent(in) :: bounds
integer, intent(in) :: num_soilp ! number of soil pfts in filter
integer, intent(in) :: filter_soilp(:) ! filter for soil pfts
type(cnveg_carbonflux_type), intent(in) :: cnveg_cf_inst
- type(cnveg_carbonstate_type),intent(in) :: cnveg_cs_inst
+ type(cnveg_carbonstate_type),intent(in) :: cnveg_cs_inst
type(fireemis_type), intent(inout) :: fireemis_inst
!LOCAL VARIABLES:
real(r8) :: fire_flux
- real(r8) :: fire_flux_lf
- real(r8) :: fire_flux_lf1
+ real(r8) :: fire_flux_lf
+ real(r8) :: fire_flux_lf1
type(shr_fire_emis_comp_t), pointer :: emis_cmp
real(r8) :: emis_flux(shr_fire_emis_comps_n)
integer :: fp,p,g,c ! indices
@@ -201,7 +212,7 @@ subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnve
if ( shr_fire_emis_mechcomps_n < 1) return
- associate( &
+ associate( &
fire_emis => fireemis_inst%fireflx_patch, &
totfire => fireemis_inst%totfire, &
mech => fireemis_inst%mech, &
@@ -236,7 +247,7 @@ subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnve
! calculate fire emissions for non-bare ground PFTs
if (patch%itype(p) > 0)then
if(cnveg_cs_inst%totvegc_col(c) > 0._r8)then
- fire_flux_lf1=0._r8
+ fire_flux_lf1=0._r8
do l = 1, ndecomp_pools
do j = 1, nlevdecomp
fire_flux_lf1 = fire_flux_lf1 + &
@@ -245,7 +256,7 @@ subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnve
end do
fire_flux_lf = fire_flux_lf1*cnveg_cs_inst%totvegc_patch(p)/cnveg_cs_inst%totvegc_col(c)
else
- fire_flux_lf=0._r8
+ fire_flux_lf=0._r8
end if
fire_flux = fire_flux_lf &
+ cnveg_cf_inst%m_leafc_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from leafc
@@ -261,13 +272,13 @@ subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnve
+ cnveg_cf_inst%m_frootc_storage_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from frootc_storage
+ cnveg_cf_inst%m_frootc_xfer_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from frootc_xfer
+ cnveg_cf_inst%m_livecrootc_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from livecrootc
- + cnveg_cf_inst%m_livecrootc_storage_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from livecrootc_storage
+ + cnveg_cf_inst%m_livecrootc_storage_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from livecrootc_storage
+ cnveg_cf_inst%m_livecrootc_xfer_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from livecrootc_xfer
+ cnveg_cf_inst%m_deadcrootc_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from deadcrootc
+ cnveg_cf_inst%m_deadcrootc_storage_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from deadcrootc_storage
+ cnveg_cf_inst%m_deadcrootc_xfer_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from deadcrootc_xfer
+ cnveg_cf_inst%m_gresp_storage_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from gresp_storage
- + cnveg_cf_inst%m_gresp_xfer_to_fire_patch (p) ! (gC/m2/s) fire C emissions from gresp_xfer
+ + cnveg_cf_inst%m_gresp_xfer_to_fire_patch (p) ! (gC/m2/s) fire C emissions from gresp_xfer
! for diagnostics
totfire%emis(p) = fire_flux ! gC/m2/sec
@@ -279,27 +290,27 @@ subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnve
epsilon = emis_cmp%emis_factors(patch%itype(p))
comp(icomp)%emis(p) = epsilon * fire_flux* 1.e-3_r8/0.5_r8 ! (to convert gC/m2/sec to kg species/m2/sec)
- emis_flux(icomp) = emis_cmp%coeff*comp(icomp)%emis(p)
+ emis_flux(icomp) = comp(icomp)%emis(p)
emis_cmp => emis_cmp%next_emiscomp
enddo emis_cmp_loop
- ! sum up the emissions compontent fluxes for the fluxes of chem mechanism compounds
+ ! sum the emission component fluxes into the fluxes of the chem mechanism compounds
do imech = 1,shr_fire_emis_mechcomps_n
n_emis_comps = shr_fire_emis_mechcomps(imech)%n_emis_comps
do icomp = 1,n_emis_comps ! loop over number of emission components that make up the nth mechanism compound
ii = shr_fire_emis_mechcomps(imech)%emis_comps(icomp)%ptr%index
- fire_emis(p,imech) = fire_emis(p,imech) + emis_flux(ii)
- mech(imech)%emis(p) = fire_emis(p,imech)
+ fire_emis(p,imech) = fire_emis(p,imech) + shr_fire_emis_mechcomps(imech)%coeffs(icomp)*emis_flux(ii)
enddo
+ mech(imech)%emis(p) = fire_emis(p,imech)
enddo
ztop(p) = vert_dist_top( patch%itype(p) )
end if ! ivt(1:15 only)
- enddo ! fp
+ enddo ! fp
end associate
end subroutine CNFireEmisUpdate
@@ -307,23 +318,23 @@ end subroutine CNFireEmisUpdate
! Private methods
!-----------------------------------------------------------------------
!ztop compiled from Val Martin et al ACP 2010, Tosca et al. JGR 2011 and Jian et al., ACP 2013
-!st ztop updated based on Val Martin pers. communication Jan2015
+!st ztop updated based on Val Martin pers. communication Jan2015
!-----------------------------------------------------------------------
-! not_vegetated 500 m
+! not_vegetated 500 m
!PFT1: needleleaf_evergreen_temperate_tree 4000 m
!2: needleleaf_evergreen_boreal_tree 4000 m
-!3: needleleaf_deciduous_boreal_tree 3000 m
-!4: broadleaf_evergreen_tropical_tree 2500 m
-!5: broadleaf_evergreen_temperate_tree 3000 m
-!6: broadleaf_deciduous_tropical_tree 2500 m
-!7: broadleaf_deciduous_temperate_tree 3000 m
-!8: broadleaf_deciduous_boreal_tree 3000 m
-!9: broadleaf_evergreen_shrub 2000 m
-!10: broadleaf_deciduous_temperate_shrub 2000 m
-!11: broadleaf_deciduous_boreal_shrub 2000 m
-!12: c3_arctic_grass 1000 m
-!13: c3_non-arctic_grass 1000 m
-!14: c4_grass 1000 m
+!3: needleleaf_deciduous_boreal_tree 3000 m
+!4: broadleaf_evergreen_tropical_tree 2500 m
+!5: broadleaf_evergreen_temperate_tree 3000 m
+!6: broadleaf_deciduous_tropical_tree 2500 m
+!7: broadleaf_deciduous_temperate_tree 3000 m
+!8: broadleaf_deciduous_boreal_tree 3000 m
+!9: broadleaf_evergreen_shrub 2000 m
+!10: broadleaf_deciduous_temperate_shrub 2000 m
+!11: broadleaf_deciduous_boreal_shrub 2000 m
+!12: c3_arctic_grass 1000 m
+!13: c3_non-arctic_grass 1000 m
+!14: c4_grass 1000 m
!15: c3_crop 1000 m
!(and all new crops: 1000m)
@@ -374,4 +385,3 @@ function vert_dist_top( veg_type ) result(ztop)
end function vert_dist_top
end module CNFireEmissionsMod
-
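Two separable changes sit in this file: history fields whose emission-component name begins with 'num_' are now written with units of molecules/m2/sec instead of kg/m2/sec, and the per-component coefficient is applied while summing into mechanism compounds (via shr_fire_emis_mechcomps(imech)%coeffs(icomp)) rather than at the component flux itself. The unit selection, restated as a sketch with hypothetical component names:

    def fire_emis_units(component_name: str) -> str:
        # Mirrors the Fortran unit selection added above (sketch).
        if component_name.startswith("num_"):
            return "molecules/m2/sec"  # number-flux components
        return "kg/m2/sec"             # mass-flux components

    assert fire_emis_units("num_bc") == "molecules/m2/sec"  # hypothetical name
    assert fire_emis_units("bc_a4") == "kg/m2/sec"          # hypothetical name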
diff --git a/src/biogeochem/CNGapMortalityMod.F90 b/src/biogeochem/CNGapMortalityMod.F90
index bd6867b999..b72976a096 100644
--- a/src/biogeochem/CNGapMortalityMod.F90
+++ b/src/biogeochem/CNGapMortalityMod.F90
@@ -88,7 +88,7 @@ subroutine CNGapMortality (bounds, num_soilc, filter_soilc, num_soilp, filter_so
! Gap-phase mortality routine for coupled carbon-nitrogen code (CN)
!
! !USES:
- use clm_time_manager , only: get_curr_days_per_year
+ use clm_time_manager , only: get_average_days_per_year
use clm_varpar , only: nlevdecomp_full
use clm_varcon , only: secspday
use clm_varctl , only: use_cndv, spinup_state
@@ -179,7 +179,7 @@ subroutine CNGapMortality (bounds, num_soilc, filter_soilc, num_soilp, filter_so
end if
- m = am/(get_curr_days_per_year() * secspday)
+ m = am/(get_average_days_per_year() * secspday)
!------------------------------------------------------
! patch-level gap mortality carbon fluxes
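As in the C14 change, the annual gap-mortality rate am (1/yr) is now converted to a per-second rate m using the average rather than the current year length. A short sketch of the conversion with a hypothetical rate:

    secspday = 86400.0
    avg_days_per_year = 365.25               # assumed average year length
    am = 0.02                                # hypothetical annual mortality rate (1/yr)
    m = am / (avg_days_per_year * secspday)  # per-second rate used by the flux code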
diff --git a/src/biogeochem/CNPhenologyMod.F90 b/src/biogeochem/CNPhenologyMod.F90
index 0ff1631eac..13d93238e3 100644
--- a/src/biogeochem/CNPhenologyMod.F90
+++ b/src/biogeochem/CNPhenologyMod.F90
@@ -605,7 +605,7 @@ subroutine CNEvergreenPhenology (num_soilp, filter_soilp , &
!
! !USES:
use clm_varcon , only : secspday
- use clm_time_manager , only : get_curr_days_per_year
+ use clm_time_manager , only : get_average_days_per_year
use clm_varctl , only : CN_evergreen_phenology_opt
!
! !ARGUMENTS:
@@ -618,7 +618,7 @@ subroutine CNEvergreenPhenology (num_soilp, filter_soilp , &
type(cnveg_nitrogenflux_type) , intent(inout) :: cnveg_nitrogenflux_inst
!
! !LOCAL VARIABLES:
- real(r8):: dayspyr ! Days per year
+ real(r8):: avg_dayspyr ! Average days per year
integer :: p ! indices
integer :: fp ! lake filter patch index
@@ -693,12 +693,12 @@ subroutine CNEvergreenPhenology (num_soilp, filter_soilp , &
lgsf => cnveg_state_inst%lgsf_patch & ! Output: [real(r8) (:) ] long growing season factor [0-1]
)
- dayspyr = get_curr_days_per_year()
+ avg_dayspyr = get_average_days_per_year()
do fp = 1,num_soilp
p = filter_soilp(fp)
if (evergreen(ivt(p)) == 1._r8) then
- bglfr(p) = 1._r8/(leaf_long(ivt(p)) * dayspyr * secspday)
+ bglfr(p) = 1._r8/(leaf_long(ivt(p)) * avg_dayspyr * secspday)
bgtr(p) = 0._r8
lgsf(p) = 0._r8
end if
@@ -1220,7 +1220,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , &
! per year.
!
! !USES:
- use clm_time_manager , only : get_curr_days_per_year
+ use clm_time_manager , only : get_average_days_per_year
use CNSharedParamsMod, only : use_fun
use clm_varcon , only : secspday
use shr_const_mod , only : SHR_CONST_TKFRZ, SHR_CONST_PI
@@ -1243,7 +1243,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , &
real(r8),parameter :: secspqtrday = secspday / 4 ! seconds per quarter day
integer :: g,c,p ! indices
integer :: fp ! lake filter patch index
- real(r8):: dayspyr ! days per year
+ real(r8):: avg_dayspyr ! average days per year
real(r8):: crit_onset_gdd ! degree days for onset trigger
real(r8):: soilt ! temperature of top soil layer
real(r8):: psi ! water stress of top soil layer
@@ -1338,8 +1338,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , &
deadcrootn_storage_to_xfer => cnveg_nitrogenflux_inst%deadcrootn_storage_to_xfer_patch & ! Output: [real(r8) (:) ]
)
- ! set time steps
- dayspyr = get_curr_days_per_year()
+ avg_dayspyr = get_average_days_per_year()
! specify rain threshold for leaf onset
rain_threshold = 20._r8
@@ -1588,7 +1587,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , &
! calculate long growing season factor (lgsf)
! only begin to calculate a lgsf greater than 0.0 once the number
! of days active exceeds days/year.
- lgsf(p) = max(min(3.0_r8*(days_active(p)-leaf_long(ivt(p))*dayspyr )/dayspyr, 1._r8),0._r8)
+ lgsf(p) = max(min(3.0_r8*(days_active(p)-leaf_long(ivt(p))*avg_dayspyr )/avg_dayspyr, 1._r8),0._r8)
! RosieF. 5 Nov 2015. Changed this such that the increase in leaf turnover is faster after
! trees enter the 'fake evergreen' state. Otherwise, they have a whole year of
! cheating, with less litterfall than they should have, resulting in very high LAI.
@@ -1603,7 +1602,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , &
! calculate the background litterfall rate (bglfr)
! in units 1/s, based on leaf longevity (yrs) and correction for long growing season
- bglfr(p) = (1._r8/(leaf_long(ivt(p))*dayspyr*secspday))*lgsf(p)
+ bglfr(p) = (1._r8/(leaf_long(ivt(p))*avg_dayspyr*secspday))*lgsf(p)
end if
! set background transfer rate when active but not in the phenological onset period
@@ -1614,7 +1613,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , &
! in complete turnover of the storage pools in one year at steady state,
! once lgsf has reached 1.0 (after 730 days active).
- bgtr(p) = (1._r8/(dayspyr*secspday))*lgsf(p)
+ bgtr(p) = (1._r8/(avg_dayspyr*secspday))*lgsf(p)
! set carbon fluxes for shifting storage pools to transfer pools
@@ -1661,6 +1660,7 @@ subroutine CropPhenology(num_pcropp, filter_pcropp , &
! !USES:
use clm_time_manager , only : get_prev_calday, get_curr_days_per_year, is_beg_curr_year
+ use clm_time_manager , only : get_average_days_per_year
use pftconMod , only : ntmp_corn, nswheat, nwwheat, ntmp_soybean
use pftconMod , only : nirrig_tmp_corn, nirrig_swheat, nirrig_wwheat, nirrig_tmp_soybean
use pftconMod , only : ntrp_corn, nsugarcane, ntrp_soybean, ncotton, nrice
@@ -1696,7 +1696,8 @@ subroutine CropPhenology(num_pcropp, filter_pcropp , &
integer h ! hemisphere indices
integer s ! growing season indices
integer idpp ! number of days past planting
- real(r8) dayspyr ! days per year
+ real(r8) dayspyr ! number of days in the current year
+ real(r8) avg_dayspyr ! average number of days per year
real(r8) crmcorn ! comparative relative maturity for corn
real(r8) ndays_on ! number of days to fertilize
logical do_plant_normal ! are the normal planting rules defined and satisfied?
@@ -1763,6 +1764,7 @@ subroutine CropPhenology(num_pcropp, filter_pcropp , &
! get time info
dayspyr = get_curr_days_per_year()
+ avg_dayspyr = get_average_days_per_year()
jday = get_prev_calday()
if (use_fertilizer) then
@@ -2103,7 +2105,7 @@ subroutine CropPhenology(num_pcropp, filter_pcropp , &
else if (hui(p) >= huigrain(p)) then
cphase(p) = 3._r8
- bglfr(p) = 1._r8/(leaf_long(ivt(p))*dayspyr*secspday)
+ bglfr(p) = 1._r8/(leaf_long(ivt(p))*avg_dayspyr*secspday)
end if
! continue fertilizer application while in phase 2;
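The phenology changes follow the same pattern: rates derived from leaf longevity (bglfr, bgtr, lgsf) use the average year length, while CropPhenology keeps the current-year dayspyr where the actual calendar matters and uses avg_dayspyr only in the leaf-longevity term. A worked sketch of the background litterfall rate with hypothetical inputs:

    secspday = 86400.0
    avg_dayspyr = 365.25   # assumed average days per year
    leaf_long = 1.5        # hypothetical leaf longevity (yr)
    lgsf = 0.5             # hypothetical long growing season factor [0-1]
    bglfr = (1.0 / (leaf_long * avg_dayspyr * secspday)) * lgsf  # 1/s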
diff --git a/src/biogeophys/BalanceCheckMod.F90 b/src/biogeophys/BalanceCheckMod.F90
index ec928f8645..ff72bcb307 100644
--- a/src/biogeophys/BalanceCheckMod.F90
+++ b/src/biogeophys/BalanceCheckMod.F90
@@ -561,6 +561,7 @@ subroutine BalanceCheck( bounds, &
eflx_lh_tot => energyflux_inst%eflx_lh_tot_patch , & ! Input: [real(r8) (:) ] total latent heat flux (W/m**2) [+ to atm]
eflx_soil_grnd => energyflux_inst%eflx_soil_grnd_patch , & ! Input: [real(r8) (:) ] soil heat flux (W/m**2) [+ = into soil]
eflx_wasteheat_patch => energyflux_inst%eflx_wasteheat_patch , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2)
+ eflx_ventilation_patch => energyflux_inst%eflx_ventilation_patch , & ! Input: [real(r8) (:) ] sensible heat flux from building ventilation (W/m**2)
eflx_heat_from_ac_patch => energyflux_inst%eflx_heat_from_ac_patch , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2)
eflx_traffic_patch => energyflux_inst%eflx_traffic_patch , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2)
eflx_dynbal => energyflux_inst%eflx_dynbal_grc , & ! Input: [real(r8) (:) ] energy conversion flux due to dynamic land cover change(W/m**2) [+ to atm]
@@ -911,7 +912,8 @@ subroutine BalanceCheck( bounds, &
errseb(p) = sabv(p) + sabg(p) &
- eflx_lwrad_net(p) &
- eflx_sh_tot(p) - eflx_lh_tot(p) - eflx_soil_grnd(p) &
- + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p)
+ + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) &
+ + eflx_ventilation_patch(p)
end if
!TODO MV - move this calculation to a better place - does not belong in BalanceCheck
netrad(p) = fsa(p) - eflx_lwrad_net(p)
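With ventilation added, the urban surface energy balance residual checked above gains one more source term on the canyon side. Schematically, with placeholder values chosen so the budget closes (all terms in W/m**2; signs follow the Fortran above):

    sabv, sabg = 100.0, 200.0                  # absorbed solar: vegetation, ground
    eflx_lwrad_net, eflx_sh_tot = 60.0, 80.0   # net longwave out, sensible heat
    eflx_lh_tot, eflx_soil_grnd = 90.0, 70.0   # latent heat, ground heat flux
    eflx_wasteheat = eflx_heat_from_ac = eflx_traffic = 0.0
    eflx_ventilation = 0.0                     # the newly added term

    errseb = (sabv + sabg - eflx_lwrad_net - eflx_sh_tot - eflx_lh_tot
              - eflx_soil_grnd + eflx_wasteheat + eflx_heat_from_ac
              + eflx_traffic + eflx_ventilation)
    assert abs(errseb) < 1e-9  # residual should vanish when the budget closes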
diff --git a/src/biogeophys/EnergyFluxType.F90 b/src/biogeophys/EnergyFluxType.F90
index 5634d26e50..685663b83d 100644
--- a/src/biogeophys/EnergyFluxType.F90
+++ b/src/biogeophys/EnergyFluxType.F90
@@ -8,7 +8,7 @@ module EnergyFluxType
use shr_kind_mod , only : r8 => shr_kind_r8
use shr_log_mod , only : errMsg => shr_log_errMsg
use clm_varcon , only : spval
- use clm_varctl , only : use_biomass_heat_storage
+ use clm_varctl , only : use_biomass_heat_storage, iulog
use decompMod , only : bounds_type
use LandunitType , only : lun
use ColumnType , only : col
@@ -63,9 +63,11 @@ module EnergyFluxType
real(r8), pointer :: eflx_anthro_patch (:) ! patch total anthropogenic heat flux (W/m**2)
real(r8), pointer :: eflx_traffic_patch (:) ! patch traffic sensible heat flux (W/m**2)
real(r8), pointer :: eflx_wasteheat_patch (:) ! patch sensible heat flux from domestic heating/cooling sources of waste heat (W/m**2)
+ real(r8), pointer :: eflx_ventilation_patch (:) ! patch sensible heat flux from building ventilation (W/m**2)
real(r8), pointer :: eflx_heat_from_ac_patch (:) ! patch sensible heat flux put back into canyon due to removal by AC (W/m**2)
real(r8), pointer :: eflx_traffic_lun (:) ! lun traffic sensible heat flux (W/m**2)
real(r8), pointer :: eflx_wasteheat_lun (:) ! lun sensible heat flux from domestic heating/cooling sources of waste heat (W/m**2)
+ real(r8), pointer :: eflx_ventilation_lun (:) ! lun sensible heat flux from building ventilation (W/m**2)
real(r8), pointer :: eflx_heat_from_ac_lun (:) ! lun sensible heat flux to be put back into canyon due to removal by AC (W/m**2)
real(r8), pointer :: eflx_building_lun (:) ! lun building heat flux from change in interior building air temperature (W/m**2)
real(r8), pointer :: eflx_urban_ac_lun (:) ! lun urban air conditioning flux (W/m**2)
@@ -155,7 +157,7 @@ subroutine Init(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_buildtemp
SHR_ASSERT_ALL_FL((ubound(t_grnd_col) == (/bounds%endc/)), sourcefile, __LINE__)
call this%InitAllocate ( bounds )
- call this%InitHistory ( bounds, is_simple_buildtemp )
+ call this%InitHistory ( bounds, is_simple_buildtemp, is_prog_buildtemp )
call this%InitCold ( bounds, t_grnd_col, is_simple_buildtemp, is_prog_buildtemp )
end subroutine Init
@@ -226,6 +228,7 @@ subroutine InitAllocate(this, bounds)
allocate( this%eflx_urban_ac_col (begc:endc)) ; this%eflx_urban_ac_col (:) = nan
allocate( this%eflx_urban_heat_col (begc:endc)) ; this%eflx_urban_heat_col (:) = nan
allocate( this%eflx_wasteheat_patch (begp:endp)) ; this%eflx_wasteheat_patch (:) = nan
+ allocate( this%eflx_ventilation_patch (begp:endp)) ; this%eflx_ventilation_patch (:) = nan
allocate( this%eflx_traffic_patch (begp:endp)) ; this%eflx_traffic_patch (:) = nan
allocate( this%eflx_heat_from_ac_patch (begp:endp)) ; this%eflx_heat_from_ac_patch (:) = nan
allocate( this%eflx_heat_from_ac_lun (begl:endl)) ; this%eflx_heat_from_ac_lun (:) = nan
@@ -234,6 +237,7 @@ subroutine InitAllocate(this, bounds)
allocate( this%eflx_urban_heat_lun (begl:endl)) ; this%eflx_urban_heat_lun (:) = nan
allocate( this%eflx_traffic_lun (begl:endl)) ; this%eflx_traffic_lun (:) = nan
allocate( this%eflx_wasteheat_lun (begl:endl)) ; this%eflx_wasteheat_lun (:) = nan
+ allocate( this%eflx_ventilation_lun (begl:endl)) ; this%eflx_ventilation_lun (:) = nan
allocate( this%eflx_anthro_patch (begp:endp)) ; this%eflx_anthro_patch (:) = nan
allocate( this%dgnetdT_patch (begp:endp)) ; this%dgnetdT_patch (:) = nan
@@ -276,7 +280,7 @@ subroutine InitAllocate(this, bounds)
end subroutine InitAllocate
!------------------------------------------------------------------------
- subroutine InitHistory(this, bounds, is_simple_buildtemp)
+ subroutine InitHistory(this, bounds, is_simple_buildtemp, is_prog_buildtemp)
!
! !DESCRIPTION:
! Setup fields that can be output to history files
@@ -293,6 +297,7 @@ subroutine InitHistory(this, bounds, is_simple_buildtemp)
class(energyflux_type) :: this
type(bounds_type), intent(in) :: bounds
logical , intent(in) :: is_simple_buildtemp ! If using simple building temp method
+ logical , intent(in) :: is_prog_buildtemp ! If using prognostic building temp method
!
! !LOCAL VARIABLES:
integer :: begp, endp
@@ -617,6 +622,13 @@ subroutine InitHistory(this, bounds, is_simple_buildtemp)
avgflag='A', long_name='sensible heat flux from heating/cooling sources of urban waste heat', &
ptr_patch=this%eflx_wasteheat_patch, set_nourb=0._r8, c2l_scale_type='urbanf')
+ if ( is_prog_buildtemp )then
+ this%eflx_ventilation_patch(begp:endp) = spval
+ call hist_addfld1d (fname='VENTILATION', units='W/m^2', &
+ avgflag='A', long_name='sensible heat flux from building ventilation', &
+ ptr_patch=this%eflx_ventilation_patch, set_nourb=0._r8, c2l_scale_type='urbanf')
+ end if
+
this%eflx_heat_from_ac_patch(begp:endp) = spval
call hist_addfld1d (fname='HEAT_FROM_AC', units='W/m^2', &
avgflag='A', long_name='sensible heat flux put into canyon due to heat removed from air conditioning', &
@@ -694,7 +706,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build
use landunit_varcon , only : istwet, istsoil, istdlak
use column_varcon , only : icol_road_imperv, icol_roof, icol_sunwall
use column_varcon , only : icol_shadewall, icol_road_perv
- use clm_varctl , only : iulog, use_vancouver, use_mexicocity
+ use clm_varctl , only : use_vancouver, use_mexicocity
implicit none
!
! !ARGUMENTS:
@@ -751,6 +763,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build
if (.not. lun%urbpoi(l)) then
this%eflx_traffic_lun(l) = spval
this%eflx_wasteheat_lun(l) = spval
+ this%eflx_ventilation_lun(l) = spval
if ( is_prog_buildtemp )then
this%eflx_building_lun(l) = 0._r8
this%eflx_urban_ac_lun(l) = 0._r8
@@ -758,6 +771,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build
end if
this%eflx_wasteheat_patch(p) = 0._r8
+ this%eflx_ventilation_patch(p) = 0._r8
this%eflx_heat_from_ac_patch(p) = 0._r8
this%eflx_traffic_patch(p) = 0._r8
if ( is_simple_buildtemp) &
@@ -767,6 +781,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build
this%eflx_building_lun(l) = 0._r8
this%eflx_urban_ac_lun(l) = 0._r8
this%eflx_urban_heat_lun(l) = 0._r8
+ this%eflx_ventilation_lun(l) = 0._r8
end if
end if
end do
@@ -861,6 +876,16 @@ subroutine Restart(this, bounds, ncid, flag, is_simple_buildtemp, is_prog_buildt
else
this%eflx_urban_heat_lun = 0.0_r8
end if
+ call restartvar(ncid=ncid, flag=flag, varname='EFLX_VENTILATION', xtype=ncd_double, &
+ dim1name='landunit', &
+ long_name='sensible heat flux from building ventilation', units='watt/m^2', &
+ interpinic_flag='interp', readvar=readvar, data=this%eflx_ventilation_lun)
+ if (flag=='read' .and. .not. readvar) then
+ if (masterproc) write(iulog,*) "can't find EFLX_VENTILATION in initial file..."
+ if (masterproc) write(iulog,*) "Initialize EFLX_VENTILATION to zero"
+ this%eflx_ventilation_lun(bounds%begl:bounds%endl) = 0._r8
+ end if
+
else if ( is_simple_buildtemp )then
call restartvar(ncid=ncid, flag=flag, varname='URBAN_AC', xtype=ncd_double, &
dim1name='column', &
@@ -972,7 +997,6 @@ subroutine UpdateAccVars (this, bounds)
use shr_const_mod , only : SHR_CONST_CDAY, SHR_CONST_TKFRZ
use clm_time_manager , only : get_step_size, get_nstep, is_end_curr_day, get_curr_date
use accumulMod , only : update_accum_field, extract_accum_field, accumResetVal
- use clm_varctl , only : iulog
use abortutils , only : endrun
!
! !ARGUMENTS:
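The Restart hunk above gives the new EFLX_VENTILATION field a missing-variable fallback, so initial files written before this change still read cleanly. A minimal, self-contained sketch of that read-with-default idiom follows; read_restart_field and its found flag are hypothetical stand-ins for CTSM's restartvar machinery, not real API:

    ! Sketch of the read-with-default restart idiom used above. Only the
    ! control flow is the point; the I/O routine is a dummy.
    program restart_fallback_sketch
      implicit none
      integer, parameter :: r8 = selected_real_kind(12)
      real(r8) :: eflx_ventilation(4)
      logical  :: found

      call read_restart_field('EFLX_VENTILATION', eflx_ventilation, found)
      if (.not. found) then
         ! Field absent from an older initial file: fall back to zero,
         ! exactly as the Restart code above does.
         write(*,*) "can't find EFLX_VENTILATION in initial file; initializing to zero"
         eflx_ventilation(:) = 0._r8
      end if

    contains

      subroutine read_restart_field(name, field, found)
        character(len=*), intent(in)  :: name
        real(r8),         intent(out) :: field(:)
        logical,          intent(out) :: found
        ! Hypothetical stand-in: pretend the field is missing so the
        ! fallback path above runs.
        field(:) = 0._r8
        found = .false.
      end subroutine read_restart_field

    end program restart_fallback_sketch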
diff --git a/src/biogeophys/HydrologyDrainageMod.F90 b/src/biogeophys/HydrologyDrainageMod.F90
index 4f9c549111..31ffc817a0 100644
--- a/src/biogeophys/HydrologyDrainageMod.F90
+++ b/src/biogeophys/HydrologyDrainageMod.F90
@@ -115,8 +115,7 @@ subroutine HydrologyDrainage(bounds, &
qflx_runoff => waterfluxbulk_inst%qflx_runoff_col , & ! total runoff (qflx_drain+qflx_surf+qflx_qrgwl) (mm H2O /s)
qflx_runoff_u => waterfluxbulk_inst%qflx_runoff_u_col , & ! Urban total runoff (qflx_drain+qflx_surf) (mm H2O /s)
qflx_runoff_r => waterfluxbulk_inst%qflx_runoff_r_col , & ! Rural total runoff (qflx_drain+qflx_surf+qflx_qrgwl) (mm H2O /s)
- qflx_ice_runoff_snwcp => waterfluxbulk_inst%qflx_ice_runoff_snwcp_col, & ! solid runoff from snow capping (mm H2O /s)
- qflx_sfc_irrig => waterfluxbulk_inst%qflx_sfc_irrig_col & ! surface irrigation flux (mm H2O /s)
+ qflx_ice_runoff_snwcp => waterfluxbulk_inst%qflx_ice_runoff_snwcp_col & ! solid runoff from snow capping (mm H2O /s)
)
! Determine time step and step size
@@ -217,9 +216,6 @@ subroutine HydrologyDrainage(bounds, &
qflx_runoff(c) = qflx_drain(c) + qflx_surf(c) + qflx_qrgwl(c) + qflx_drain_perched(c)
- if ((lun%itype(l)==istsoil .or. lun%itype(l)==istcrop) .and. col%active(c)) then
- qflx_runoff(c) = qflx_runoff(c) - qflx_sfc_irrig(c)
- end if
if (lun%urbpoi(l)) then
qflx_runoff_u(c) = qflx_runoff(c)
else if (lun%itype(l)==istsoil .or. lun%itype(l)==istcrop) then
diff --git a/src/biogeophys/SoilFluxesMod.F90 b/src/biogeophys/SoilFluxesMod.F90
index bb88042797..5f4030c6e1 100644
--- a/src/biogeophys/SoilFluxesMod.F90
+++ b/src/biogeophys/SoilFluxesMod.F90
@@ -115,6 +115,7 @@ subroutine SoilFluxes (bounds, num_urbanl, filter_urbanl, &
htvp => energyflux_inst%htvp_col , & ! Input: [real(r8) (:) ] latent heat of vapor of water (or sublimation) [j/kg]
eflx_building_heat_errsoi=> energyflux_inst%eflx_building_heat_errsoi_col , & ! Input: [real(r8) (:)] heat flux to interior surface of walls and roof for errsoi check (W m-2)
eflx_wasteheat_patch => energyflux_inst%eflx_wasteheat_patch , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2)
+ eflx_ventilation_patch => energyflux_inst%eflx_ventilation_patch , & ! Input: [real(r8) (:) ] sensible heat flux from building ventilation (W/m**2)
eflx_heat_from_ac_patch => energyflux_inst%eflx_heat_from_ac_patch , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2)
eflx_traffic_patch => energyflux_inst%eflx_traffic_patch , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2)
dlrad => energyflux_inst%dlrad_patch , & ! Input: [real(r8) (:) ] downward longwave radiation below the canopy [W/m2]
@@ -278,7 +279,8 @@ subroutine SoilFluxes (bounds, num_urbanl, filter_urbanl, &
eflx_soil_grnd(p) = sabg(p) + dlrad(p) &
- eflx_lwrad_net(p) - eflx_lwrad_del(p) &
- (eflx_sh_grnd(p) + qflx_evap_soi(p)*htvp(c) + qflx_tran_veg(p)*hvap) &
- + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p)
+ + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) &
+ + eflx_ventilation_patch(p)
eflx_soil_grnd_u(p) = eflx_soil_grnd(p)
end if
diff --git a/src/biogeophys/SoilTemperatureMod.F90 b/src/biogeophys/SoilTemperatureMod.F90
index ba4432cba2..513413e8a9 100644
--- a/src/biogeophys/SoilTemperatureMod.F90
+++ b/src/biogeophys/SoilTemperatureMod.F90
@@ -1434,7 +1434,7 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, &
use clm_varcon , only : sb, hvap
use column_varcon , only : icol_road_perv, icol_road_imperv
use clm_varpar , only : nlevsno, max_patch_per_col
- use UrbanParamsType, only : IsSimpleBuildTemp
+ use UrbanParamsType, only : IsSimpleBuildTemp, IsProgBuildTemp
!
! !ARGUMENTS:
implicit none
@@ -1506,6 +1506,7 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, &
dlrad => energyflux_inst%dlrad_patch , & ! Input: [real(r8) (:) ] downward longwave radiation below the canopy [W/m2]
eflx_traffic => energyflux_inst%eflx_traffic_lun , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2)
eflx_wasteheat => energyflux_inst%eflx_wasteheat_lun , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2)
+ eflx_ventilation => energyflux_inst%eflx_ventilation_lun , & ! Input: [real(r8) (:) ] sensible heat flux from building ventilation (W/m**2)
eflx_heat_from_ac => energyflux_inst%eflx_heat_from_ac_lun , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2)
eflx_sh_snow => energyflux_inst%eflx_sh_snow_patch , & ! Input: [real(r8) (:) ] sensible heat flux from snow (W/m**2) [+ to atm]
eflx_sh_soil => energyflux_inst%eflx_sh_soil_patch , & ! Input: [real(r8) (:) ] sensible heat flux from soil (W/m**2) [+ to atm]
@@ -1513,6 +1514,7 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, &
eflx_sh_grnd => energyflux_inst%eflx_sh_grnd_patch , & ! Input: [real(r8) (:) ] sensible heat flux from ground (W/m**2) [+ to atm]
eflx_lwrad_net => energyflux_inst%eflx_lwrad_net_patch , & ! Input: [real(r8) (:) ] net infrared (longwave) rad (W/m**2) [+ = to atm]
eflx_wasteheat_patch => energyflux_inst%eflx_wasteheat_patch , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2)
+ eflx_ventilation_patch => energyflux_inst%eflx_ventilation_patch , & ! Input: [real(r8) (:) ] sensible heat flux from building ventilation (W/m**2)
eflx_heat_from_ac_patch => energyflux_inst%eflx_heat_from_ac_patch , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2)
eflx_traffic_patch => energyflux_inst%eflx_traffic_patch , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2)
eflx_anthro => energyflux_inst%eflx_anthro_patch , & ! Input: [real(r8) (:) ] total anthropogenic heat flux (W/m**2)
@@ -1581,11 +1583,19 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, &
! All wasteheat and traffic flux goes into canyon floor
if (col%itype(c) == icol_road_perv .or. col%itype(c) == icol_road_imperv) then
+ ! Note that we divide the following landunit variables by 1-wtlunit_roof, which
+ ! essentially converts the flux from W/m2 of urban area to W/m2 of canyon floor area.
eflx_wasteheat_patch(p) = eflx_wasteheat(l)/(1._r8-lun%wtlunit_roof(l))
+ if ( IsSimpleBuildTemp() ) then
+ eflx_ventilation_patch(p) = 0._r8
+ else if ( IsProgBuildTemp() ) then
+ eflx_ventilation_patch(p) = eflx_ventilation(l)/(1._r8-lun%wtlunit_roof(l))
+ end if
eflx_heat_from_ac_patch(p) = eflx_heat_from_ac(l)/(1._r8-lun%wtlunit_roof(l))
eflx_traffic_patch(p) = eflx_traffic(l)/(1._r8-lun%wtlunit_roof(l))
else
eflx_wasteheat_patch(p) = 0._r8
+ eflx_ventilation_patch(p) = 0._r8
eflx_heat_from_ac_patch(p) = 0._r8
eflx_traffic_patch(p) = 0._r8
end if
@@ -1594,7 +1604,8 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, &
eflx_gnet(p) = sabg(p) + dlrad(p) &
- eflx_lwrad_net(p) &
- (eflx_sh_grnd(p) + qflx_evap_soi(p)*htvp(c) + qflx_tran_veg(p)*hvap) &
- + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p)
+ + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) &
+ + eflx_ventilation_patch(p)
if ( IsSimpleBuildTemp() ) then
eflx_anthro(p) = eflx_wasteheat_patch(p) + eflx_traffic_patch(p)
end if
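To make the division by 1-wtlunit_roof above concrete: the landunit-level fluxes are expressed per m2 of total urban area, but the road patches receiving them cover only the canyon floor, i.e. the non-roof fraction. A small numeric check with made-up values (a minimal sketch, not model parameters):

    ! Illustrative check of the urban-area -> canyon-floor conversion above.
    ! The numbers are invented; only the algebra matches the model code.
    program canyon_floor_flux_sketch
      implicit none
      integer, parameter :: r8 = selected_real_kind(12)
      real(r8) :: wtlunit_roof, eflx_wasteheat_lun, eflx_wasteheat_patch

      wtlunit_roof       = 0.6_r8   ! 60% of the urban landunit area is roof
      eflx_wasteheat_lun = 10._r8   ! W per m2 of total urban area

      ! Same energy spread over only the canyon-floor fraction (here 40%):
      eflx_wasteheat_patch = eflx_wasteheat_lun / (1._r8 - wtlunit_roof)

      write(*,'(a,f6.2,a)') 'canyon-floor flux = ', eflx_wasteheat_patch, ' W/m2'
      ! Energy check: (1 - 0.6) * 25 W/m2 of canyon floor = 10 W/m2 of urban area.
    end program canyon_floor_flux_sketch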
diff --git a/src/biogeophys/UrbBuildTempOleson2015Mod.F90 b/src/biogeophys/UrbBuildTempOleson2015Mod.F90
index 0ace2868d6..bf8b68c7eb 100644
--- a/src/biogeophys/UrbBuildTempOleson2015Mod.F90
+++ b/src/biogeophys/UrbBuildTempOleson2015Mod.F90
@@ -329,7 +329,8 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec,
eflx_building => energyflux_inst%eflx_building_lun , & ! Output: [real(r8) (:)] building heat flux from change in interior building air temperature (W/m**2)
eflx_urban_ac => energyflux_inst%eflx_urban_ac_lun , & ! Output: [real(r8) (:)] urban air conditioning flux (W/m**2)
- eflx_urban_heat => energyflux_inst%eflx_urban_heat_lun & ! Output: [real(r8) (:)] urban heating flux (W/m**2)
+ eflx_urban_heat => energyflux_inst%eflx_urban_heat_lun , & ! Output: [real(r8) (:)] urban heating flux (W/m**2)
+ eflx_ventilation => energyflux_inst%eflx_ventilation_lun & ! Output: [real(r8) (:)] sensible heat flux from building ventilation (W/m**2)
)
! Get step size
@@ -899,6 +900,14 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec,
write (iulog,*) 'clm model is stopping'
call endrun(subgrid_index=l, subgrid_level=subgrid_level_landunit)
end if
+
+ ! Sensible heat flux from ventilation. It is added as a flux to the canyon floor in SoilTemperatureMod.
+ ! Note that we multiply it here by wtlunit_roof which converts it from W/m2 of building area to W/m2
+ ! of urban area. eflx_urban_ac and eflx_urban_heat are treated similarly below. This flux is balanced
+ ! by an equal and opposite flux into/out of the building and so has a net effect of zero on the energy balance
+ ! of the urban landunit.
+ eflx_ventilation(l) = wtlunit_roof(l) * ( - ht_roof(l)*(vent_ach/3600._r8) &
+ * rho_dair(l) * cpair * (taf(l) - t_building(l)) )
end if
end do
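The units in the ventilation expression above work out as follows: vent_ach/3600 converts air changes per hour to changes per second; multiplying by ht_roof gives the volume of air exchanged per second per m2 of building footprint (m/s); multiplying by rho_dair*cpair*(taf - t_building) turns that into a sensible heat flux in W/m2 of building area; the outer wtlunit_roof rescales to W/m2 of urban area; and the leading minus sign makes the flux positive into the canyon when the building interior is warmer than the canyon air. A standalone sketch with invented values (not CTSM parameters):

    ! Unit walk-through of the ventilation flux above; all values are
    ! illustrative stand-ins, not CTSM parameter values.
    program ventilation_flux_sketch
      implicit none
      integer, parameter :: r8 = selected_real_kind(12)
      real(r8), parameter :: cpair = 1004._r8  ! specific heat of dry air (J/kg/K)
      real(r8) :: ht_roof, vent_ach, rho_dair, taf, t_building, wtlunit_roof
      real(r8) :: eflx_ventilation

      ht_roof      = 10._r8    ! building height (m)
      vent_ach     = 0.3_r8    ! ventilation rate (air changes per hour)
      rho_dair     = 1.2_r8    ! density of dry air (kg/m3)
      taf          = 290._r8   ! canyon air temperature (K)
      t_building   = 295._r8   ! interior building air temperature (K)
      wtlunit_roof = 0.6_r8    ! roof fraction of the urban landunit

      ! Same expression as in BuildingTemperature above:
      eflx_ventilation = wtlunit_roof * ( - ht_roof*(vent_ach/3600._r8) &
           * rho_dair * cpair * (taf - t_building) )

      ! With these numbers: 10 * (0.3/3600) * 1.2 * 1004 * 5 * 0.6 ~ 3.0 W/m2
      write(*,'(a,f8.3,a)') 'eflx_ventilation = ', eflx_ventilation, ' W/m2 urban'
    end program ventilation_flux_sketch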
diff --git a/src/main/clm_initializeMod.F90 b/src/main/clm_initializeMod.F90
index e02c7f58da..732a440daa 100644
--- a/src/main/clm_initializeMod.F90
+++ b/src/main/clm_initializeMod.F90
@@ -16,7 +16,7 @@ module clm_initializeMod
use clm_varctl , only : use_lch4, use_cn, use_cndv, use_c13, use_c14, use_fates
use clm_varctl , only : use_soil_moisture_streams
use clm_instur , only : wt_lunit, urban_valid, wt_nat_patch, wt_cft, fert_cft
- use clm_instur , only : irrig_method, wt_glc_mec, topo_glc_mec, haslake
+ use clm_instur , only : irrig_method, wt_glc_mec, topo_glc_mec, haslake, pct_urban_max
use perf_mod , only : t_startf, t_stopf
use readParamsMod , only : readParameters
use ncdio_pio , only : file_desc_t
@@ -122,7 +122,7 @@ subroutine initialize2(ni,nj)
use clm_varctl , only : use_cn, use_fates
use clm_varctl , only : use_crop, ndep_from_cpl, fates_spitfire_mode
use clm_varorb , only : eccen, mvelpp, lambm0, obliqr
- use landunit_varcon , only : landunit_varcon_init, max_lunit
+ use landunit_varcon , only : landunit_varcon_init, max_lunit, numurbl
use pftconMod , only : pftcon
use decompInitMod , only : decompInit_clumps, decompInit_glcp
use domainMod , only : domain_check, ldomain, domain_init
@@ -215,6 +215,7 @@ subroutine initialize2(ni,nj)
allocate (wt_glc_mec (begg:endg, maxpatch_glc ))
allocate (topo_glc_mec (begg:endg, maxpatch_glc ))
allocate (haslake (begg:endg ))
+ allocate (pct_urban_max(begg:endg, numurbl ))
! Read list of Patches and their corresponding parameter values
! Independent of model resolution, Needs to stay before surfrd_get_data
@@ -288,7 +289,8 @@ subroutine initialize2(ni,nj)
! Deallocate surface grid dynamic memory for variables that aren't needed elsewhere.
! Some things are kept until the end of initialize2; urban_valid is kept through the
- ! end of the run for error checking.
+ ! end of the run for error checking, and pct_urban_max is kept through the end of
+ ! the run for reweighting in subgridWeightsMod.
deallocate (wt_lunit, wt_cft, wt_glc_mec, haslake)
! Determine processor bounds and clumps for this processor
diff --git a/src/main/clm_varsur.F90 b/src/main/clm_varsur.F90
index 41740f1e2b..d360941d23 100644
--- a/src/main/clm_varsur.F90
+++ b/src/main/clm_varsur.F90
@@ -48,6 +48,10 @@ module clm_instur
! whether we have lake to initialise in each grid cell
logical , pointer :: haslake(:)
+
+ ! maximum percent urban in each grid cell over the course of the run, used to
+ ! decide whether urban should be initialized there
+ ! (second dimension goes 1:numurbl)
+ real(r8), pointer :: pct_urban_max(:,:)
!-----------------------------------------------------------------------
end module clm_instur
diff --git a/src/main/subgridMod.F90 b/src/main/subgridMod.F90
index 42a3bb0fb9..645d02a603 100644
--- a/src/main/subgridMod.F90
+++ b/src/main/subgridMod.F90
@@ -39,6 +39,7 @@ module subgridMod
public :: subgrid_get_info_crop
public :: crop_patch_exists ! returns true if the given crop patch should be created in memory
public :: lake_landunit_exists ! returns true if the lake landunit should be created in memory
+ public :: urban_landunit_exists ! returns true if the urban landunit should be created in memory
! !PRIVATE MEMBER FUNCTIONS:
private :: subgrid_get_info_urban
@@ -348,6 +349,10 @@ subroutine subgrid_get_info_urban(gi, ltype, npatches, ncols, nlunits)
!
! In either case, for simplicity, we always allocate space for all columns on any
! allocated urban landunits.
+
+ ! For dynamic urban: to improve efficiency, 'PCT_URBAN_MAX' is added to the
+ ! landuse.timeseries file; it tells whether any urban landunit ever grows in a
+ ! given grid cell in a transient run. The urban landunit is allocated only if
+ ! PCT_URBAN_MAX is greater than 0. (#1572)
if (run_zero_weight_urban) then
if (urban_valid(gi)) then
@@ -356,11 +361,11 @@ subroutine subgrid_get_info_urban(gi, ltype, npatches, ncols, nlunits)
this_landunit_exists = .false.
end if
else
- if (wt_lunit(gi, ltype) > 0.0_r8) then
- this_landunit_exists = .true.
- else
- this_landunit_exists = .false.
- end if
+ if (urban_landunit_exists(gi, ltype)) then
+ this_landunit_exists = .true.
+ else
+ this_landunit_exists = .false.
+ end if
end if
if (this_landunit_exists) then
@@ -599,4 +604,50 @@ function lake_landunit_exists(gi) result(exists)
end function lake_landunit_exists
+!-----------------------------------------------------------------------
+ function urban_landunit_exists(gi, ltype) result(exists)
+ !
+ ! !DESCRIPTION:
+ ! Returns true if an urban landunit should be created in memory for this gridcell,
+ ! i.e., if the gridcell has or will ever grow urban area, as given by pct_urban_max
+ !
+ ! !USES:
+ use dynSubgridControlMod , only : get_do_transient_urban
+ use clm_instur , only : pct_urban_max
+ use landunit_varcon , only : isturb_MIN
+ !
+ ! !ARGUMENTS:
+ logical :: exists ! function result
+ integer, intent(in) :: gi ! grid cell index
+ integer, intent(in) :: ltype ! landunit type (isturb_tbd, etc.)
+ !
+ ! !LOCAL VARIABLES:
+ integer :: dens_index ! urban density type index
+
+ character(len=*), parameter :: subname = 'urban_landunit_exists'
+ !-----------------------------------------------------------------------
+
+ if (get_do_transient_urban()) then
+ ! To support dynamic landunits, we initialize an urban landunit in each grid cell
+ ! in which urban area is ever present, as defined by the pct_urban_max variable.
+
+ dens_index = ltype - isturb_MIN + 1
+ if (pct_urban_max(gi,dens_index) > 0.0_r8) then
+ exists = .true.
+ else
+ exists = .false.
+ end if
+
+ else
+ ! For a run without transient urban, only allocate memory for urban landunits
+ ! actually present in the run.
+ if (wt_lunit(gi, ltype) > 0.0_r8) then
+ exists = .true.
+ else
+ exists = .false.
+ end if
+ end if
+
+ end function urban_landunit_exists
+
end module subgridMod
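A condensed, standalone rendering of the allocation decision in urban_landunit_exists, with the module state (the transient-urban flag, pct_urban_max, wt_lunit) passed as dummy arguments purely for illustration:

    ! Standalone sketch of the urban_landunit_exists decision above; module
    ! state is passed in as arguments here rather than use-associated.
    program urban_exists_sketch
      implicit none
      integer, parameter :: r8 = selected_real_kind(12)
      ! Transient run: urban grows later, so memory is allocated now (prints T):
      print *, urban_exists(transient=.true.,  pct_urban_max_g=12.5_r8, wt_lunit_g=0._r8)
      ! Non-transient run: no urban present, so no allocation (prints F):
      print *, urban_exists(transient=.false., pct_urban_max_g=12.5_r8, wt_lunit_g=0._r8)
    contains
      pure logical function urban_exists(transient, pct_urban_max_g, wt_lunit_g)
        logical,  intent(in) :: transient        ! stands in for get_do_transient_urban()
        real(r8), intent(in) :: pct_urban_max_g  ! max % urban ever, this gridcell and density type
        real(r8), intent(in) :: wt_lunit_g       ! current landunit weight in this gridcell
        if (transient) then
           urban_exists = (pct_urban_max_g > 0._r8)
        else
           urban_exists = (wt_lunit_g > 0._r8)
        end if
      end function urban_exists
    end program urban_exists_sketch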
diff --git a/src/main/subgridWeightsMod.F90 b/src/main/subgridWeightsMod.F90
index 224155914c..94c7fec504 100644
--- a/src/main/subgridWeightsMod.F90
+++ b/src/main/subgridWeightsMod.F90
@@ -301,6 +301,7 @@ logical function is_active_l(l, glc_behavior)
!
! !USES:
use landunit_varcon, only : istsoil, istice, isturb_MIN, isturb_MAX, istdlak
+ use clm_instur , only : pct_urban_max
!
! !ARGUMENTS:
implicit none
@@ -309,6 +310,7 @@ logical function is_active_l(l, glc_behavior)
!
! !LOCAL VARIABLES:
integer :: g ! grid cell index
+ integer :: dens_index ! urban density index
!------------------------------------------------------------------------
if (all_active) then
@@ -334,8 +336,14 @@ logical function is_active_l(l, glc_behavior)
is_active_l = .true.
end if
- if ((lun%itype(l) >= isturb_MIN .and. lun%itype(l) <= isturb_MAX) .and. &
- run_zero_weight_urban) then
+ ! Set urban landunits to active, as long as memory has been allocated for them:
+ ! either through the run_zero_weight_urban setting, which runs all urban landunits
+ ! in every grid cell, or through pct_urban_max, the maximum percent urban for each
+ ! density type in a transient run (see subgridMod.F90 for this logic).
+ ! By doing this, urban landunits are also run virtually in grid cells that will
+ ! grow urban during the transient run.
+
+ if (lun%itype(l) >= isturb_MIN .and. lun%itype(l) <= isturb_MAX) then
is_active_l = .true.
end if
diff --git a/src/main/surfrdMod.F90 b/src/main/surfrdMod.F90
index 4e121a2c54..27da9e24df 100644
--- a/src/main/surfrdMod.F90
+++ b/src/main/surfrdMod.F90
@@ -75,10 +75,11 @@ subroutine surfrd_get_data (begg, endg, ldomain, lfsurdat, actual_numcft)
n_dom_landunits
use fileutils , only : getfil
use domainMod , only : domain_type, domain_init, domain_clean
- use clm_instur , only : wt_lunit, topo_glc_mec
+ use clm_instur , only : wt_lunit, topo_glc_mec, pct_urban_max
use landunit_varcon , only : max_lunit, istsoil, isturb_MIN, isturb_MAX
use dynSubgridControlMod, only : get_flanduse_timeseries
use dynSubgridControlMod, only : get_do_transient_lakes
+ use dynSubgridControlMod, only : get_do_transient_urban
!
! !ARGUMENTS:
@@ -237,6 +238,15 @@ subroutine surfrd_get_data (begg, endg, ldomain, lfsurdat, actual_numcft)
call surfrd_lakemask(begg, endg)
end if
+ ! read the urban mask (necessary for initialization of dynamic urban)
+ if (get_do_transient_urban()) then
+ call surfrd_urbanmask(begg, endg)
+ else
+ ! Set this to zero here. pct_urban_max is used in subgridWeightsMod to check
+ ! whether urban landunits should be run virtually.
+ pct_urban_max(:,:) = 0._r8
+ end if
+
end subroutine surfrd_get_data
!-----------------------------------------------------------------------
@@ -792,5 +802,59 @@ subroutine surfrd_lakemask(begg, endg)
end subroutine surfrd_lakemask
+ !-----------------------------------------------------------------------
+ subroutine surfrd_urbanmask(begg, endg)
+ !
+ ! !DESCRIPTION:
+ ! Reads the urban mask, indicating where urban areas are or will grow,
+ ! from the landuse.timeseries file.
+ ! Necessary for the initialization of the urban landunits.
+ ! All urban density types will be initialized if any type exists or will grow.
+ !
+ ! !USES:
+ use clm_instur , only : pct_urban_max
+ use dynSubgridControlMod , only : get_flanduse_timeseries
+ use clm_varctl , only : fname_len
+ use fileutils , only : getfil
+ !
+ ! !ARGUMENTS:
+ integer, intent(in) :: begg, endg
+ !
+ !
+ ! !LOCAL VARIABLES:
+ type(file_desc_t) :: ncid_dynuse ! netcdf id for landuse timeseries file
+ character(len=256) :: locfn ! local file name
+ character(len=fname_len) :: fdynuse ! landuse.timeseries filename
+ logical :: readvar
+ !
+ character(len=*), parameter :: subname = 'surfrd_urbanmask'
+ !
+ !-----------------------------------------------------------------------
+
+ ! get filename of landuse_timeseries file
+ fdynuse = get_flanduse_timeseries()
+
+ if (masterproc) then
+ write(iulog,*) 'Attempting to read landuse.timeseries data .....'
+ if (fdynuse == ' ') then
+ write(iulog,*)'fdynuse must be specified'
+ call endrun(msg=errMsg(sourcefile, __LINE__))
+ end if
+ end if
+
+ call getfil(fdynuse, locfn, 0 )
+
+ ! open landuse_timeseries file
+ call ncd_pio_openfile (ncid_dynuse, trim(locfn), 0)
+
+ ! read the urbanmask
+ call ncd_io(ncid=ncid_dynuse, varname='PCT_URBAN_MAX', flag='read', data=pct_urban_max, &
+ dim1name=grlnd, readvar=readvar)
+ if (.not. readvar) call endrun( msg=' ERROR: PCT_URBAN_MAX is not on landuse.timeseries file'//errMsg(sourcefile, __LINE__))
+
+ ! close the landuse_timeseries file
+ call ncd_pio_closefile(ncid_dynuse)
+ end subroutine surfrd_urbanmask
+
end module surfrdMod
diff --git a/src/soilbiogeochem/SoilBiogeochemDecompCascadeBGCMod.F90 b/src/soilbiogeochem/SoilBiogeochemDecompCascadeBGCMod.F90
index 99a736a382..114240393b 100644
--- a/src/soilbiogeochem/SoilBiogeochemDecompCascadeBGCMod.F90
+++ b/src/soilbiogeochem/SoilBiogeochemDecompCascadeBGCMod.F90
@@ -516,7 +516,7 @@ subroutine decomp_rate_constants_bgc(bounds, num_soilc, filter_soilc, &
! written by C. Koven based on original CLM4 decomposition cascade
!
! !USES:
- use clm_time_manager , only : get_curr_days_per_year
+ use clm_time_manager , only : get_average_days_per_year
use shr_const_mod , only : SHR_CONST_PI
use clm_varcon , only : secspday
!
@@ -592,7 +592,7 @@ subroutine decomp_rate_constants_bgc(bounds, num_soilc, filter_soilc, &
errMsg(sourcefile, __LINE__))
endif
- days_per_year = get_curr_days_per_year()
+ days_per_year = get_average_days_per_year()
! set "Q10" parameter
Q10 = CNParamsShareInst%Q10
diff --git a/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90 b/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90
index 677aa4b04d..6e7e6c4566 100644
--- a/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90
+++ b/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90
@@ -760,7 +760,7 @@ subroutine decomp_rates_mimics(bounds, num_soilc, filter_soilc, &
! decomposition cascade model
!
! !USES:
- use clm_time_manager , only : get_curr_days_per_year
+ use clm_time_manager , only : get_average_days_per_year
use clm_varcon , only : secspday, secsphr, tfrz
use clm_varcon , only : g_to_mg, cm3_to_m3
!
@@ -873,7 +873,7 @@ subroutine decomp_rates_mimics(bounds, num_soilc, filter_soilc, &
mino2lim = CNParamsShareInst%mino2lim
- days_per_year = get_curr_days_per_year()
+ days_per_year = get_average_days_per_year()
! ! Set "decomp_depth_efolding" parameter
! decomp_depth_efolding = CNParamsShareInst%decomp_depth_efolding
diff --git a/src/utils/clm_time_manager.F90 b/src/utils/clm_time_manager.F90
index 4ab69c51aa..bbf3d8bb29 100644
--- a/src/utils/clm_time_manager.F90
+++ b/src/utils/clm_time_manager.F90
@@ -38,6 +38,7 @@ module clm_time_manager
get_prev_calday, &! return calendar day at beginning of current timestep
get_calday, &! return calendar day from input date
get_calendar, &! return calendar
+ get_average_days_per_year,&! return the average number of days per year for the given calendar
get_curr_days_per_year, &! return the days per year for year as of the end of the current time step
get_prev_days_per_year, &! return the days per year for year as of the beginning of the current time step
get_curr_yearfrac, &! return the fractional position in the current year, as of the end of the current timestep
@@ -123,6 +124,9 @@ module clm_time_manager
private :: TimeGetymd
private :: check_timemgr_initialized
+ character(len=*), parameter, private :: sourcefile = &
+ __FILE__
+
!=========================================================================================
contains
!=========================================================================================
@@ -1258,6 +1262,63 @@ end function get_calendar
!=========================================================================================
+ real(r8) function get_average_days_per_year()
+
+ !---------------------------------------------------------------------------------
+ ! Get the average number of days per year for the given calendar.
+ !
+ ! This should be used, for example, when converting a parameter from units of
+ ! per-year to units of per-second (so that the parameter will have a fixed, constant
+ ! value rather than a slightly different value on leap years vs. non-leap years).
+
+ real(r8) :: avg_days_per_year
+ real(r8) :: curr_days_per_year
+
+ real(r8), parameter :: days_per_year_noleap = 365._r8
+
+ ! From the definition of ESMF_CALKIND_GREGORIAN in
+ ! https://earthsystemmodeling.org/docs/release/latest/ESMF_refdoc/node6.html: "In the
+ ! Gregorian calendar every fourth year is a leap year in which February has 29 and not
+ ! 28 days; however, years divisible by 100 are not leap years unless they are also
+ ! divisible by 400." This results in an average number of days per year of 365.2425.
+ real(r8), parameter :: days_per_year_gregorian = 365.2425_r8
+
+ character(len=*), parameter :: subname = 'get_average_days_per_year'
+ !---------------------------------------------------------------------------------
+
+ ! BUG(wjs, 2022-02-01, ESCOMP/CTSM#1624) Ideally we would use ESMF_CalendarGet here,
+ ! but that currently isn't possible (see notes in issue 1624 for details)
+ if (to_upper(calendar) == NO_LEAP_C) then
+ avg_days_per_year = days_per_year_noleap
+ else if (to_upper(calendar) == GREGORIAN_C) then
+ avg_days_per_year = days_per_year_gregorian
+ else
+ call shr_sys_abort(subname//' ERROR: unrecognized calendar specified= '//trim(calendar))
+ end if
+
+ ! Paranoia: Since we're using a hard-coded value, let's make sure that the user hasn't
+ ! done some customizations to the calendar that change the days per year from what we
+ ! expect: Compare the hard-coded value with the number of days per year in the
+ ! current year, which comes from the actual ESMF calendar; the two should be close.
+ ! (This check can be removed once we address issue 1624, making the results of this
+ ! function depend on the actual ESMF calendar instead of a hard-coded value.)
+ curr_days_per_year = get_curr_days_per_year()
+ if (abs(avg_days_per_year - curr_days_per_year) > 1._r8) then
+ write(iulog,*) 'ERROR: hard-coded average days per year differs by more than expected'
+ write(iulog,*) 'from current days per year. Are you using a non-standard calendar?'
+ write(iulog,*) 'avg_days_per_year (hard-coded) = ', avg_days_per_year
+ write(iulog,*) 'curr_days_per_year (from ESMF calendar) = ', curr_days_per_year
+ write(iulog,*) 'You can fix this by changing the hard-coded parameters in '//subname
+ write(iulog,*) 'in file: '//sourcefile
+ call shr_sys_abort(subname//' ERROR: hard-coded average days per year differs by more than expected')
+ end if
+
+ get_average_days_per_year = avg_days_per_year
+
+ end function get_average_days_per_year
+
+ !=========================================================================================
+
integer function get_curr_days_per_year( offset )
!---------------------------------------------------------------------------------
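As the doc comment of get_average_days_per_year says, its intended use is converting per-year rates to per-second rates once, with a value that does not wobble between leap and non-leap years. A minimal sketch of that conversion, using an invented rate constant (secspday mirrors the usual 86400 s/day share constant):

    ! Sketch of the per-year -> per-second conversion that motivates
    ! get_average_days_per_year; the rate constant is invented.
    program rate_conversion_sketch
      implicit none
      integer, parameter :: r8 = selected_real_kind(12)
      real(r8), parameter :: secspday = 86400._r8            ! seconds per day
      real(r8), parameter :: avg_days_per_year = 365.2425_r8 ! Gregorian average, as above
      real(r8) :: k_per_year, k_per_second

      k_per_year = 0.07_r8   ! example turnover rate (1/yr)
      ! Dividing by the *average* year length keeps the per-second rate fixed
      ! across leap and non-leap years.
      k_per_second = k_per_year / (avg_days_per_year * secspday)

      write(*,'(a,es12.5,a)') 'k = ', k_per_second, ' 1/s'
    end program rate_conversion_sketch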
diff --git a/src/utils/clmfates_interfaceMod.F90 b/src/utils/clmfates_interfaceMod.F90
index 6b4ad9d6e7..2191f07b9d 100644
--- a/src/utils/clmfates_interfaceMod.F90
+++ b/src/utils/clmfates_interfaceMod.F90
@@ -70,9 +70,9 @@ module CLMFatesInterfaceMod
use clm_varpar , only : numrad
use clm_varpar , only : ivis
use clm_varpar , only : inir
- use clm_varpar , only : nlevgrnd
use clm_varpar , only : nlevdecomp
use clm_varpar , only : nlevdecomp_full
+ use clm_varpar , only : nlevsoi
use PhotosynthesisMod , only : photosyns_type
use atm2lndType , only : atm2lnd_type
use SurfaceAlbedoType , only : surfalb_type
@@ -122,7 +122,6 @@ module CLMFatesInterfaceMod
use EDTypesMod , only : ed_patch_type
use PRTGenericMod , only : num_elements
- use FatesInterfaceTypesMod, only : hlm_numlevgrnd
use FatesInterfaceTypesMod, only : hlm_stepsize
use EDMainMod , only : ed_ecosystem_dynamics
use EDMainMod , only : ed_update_site
@@ -286,7 +285,7 @@ subroutine CLMFatesGlobals()
call set_fates_ctrlparms('vis_sw_index',ival=ivis)
call set_fates_ctrlparms('nir_sw_index',ival=inir)
- call set_fates_ctrlparms('num_lev_ground',ival=nlevgrnd)
+ call set_fates_ctrlparms('num_lev_soil',ival=nlevsoi)
call set_fates_ctrlparms('hlm_name',cval='CLM')
call set_fates_ctrlparms('hio_ignore_val',rval=spval)
call set_fates_ctrlparms('soilwater_ipedof',ival=get_ipedof(0))
@@ -2571,8 +2570,7 @@ subroutine init_history_io(this,bounds_proc)
use histFileMod, only : hist_addfld1d, hist_addfld2d, hist_addfld_decomp
use FatesConstantsMod, only : fates_short_string_length, fates_long_string_length
- use FatesIOVariableKindMod, only : patch_r8, patch_ground_r8, patch_size_pft_r8
- use FatesIOVariableKindMod, only : site_r8, site_ground_r8, site_size_pft_r8
+ use FatesIOVariableKindMod, only : site_r8, site_soil_r8, site_size_pft_r8
use FatesIOVariableKindMod, only : site_size_r8, site_pft_r8, site_age_r8
use FatesIOVariableKindMod, only : site_coage_r8, site_coage_pft_r8
use FatesIOVariableKindMod, only : site_fuel_r8, site_cwdsc_r8, site_scag_r8
@@ -2669,13 +2667,6 @@ subroutine init_history_io(this,bounds_proc)
ioname = trim(fates_hist%dim_kinds(dk_index)%name)
select case(trim(ioname))
- case(patch_r8)
- call hist_addfld1d(fname=trim(vname),units=trim(vunits), &
- avgflag=trim(vavgflag),long_name=trim(vlong), &
- ptr_patch=fates_hist%hvars(ivar)%r81d, &
- default=trim(vdefault), &
- set_lake=0._r8,set_urb=0._r8)
-
case(site_r8)
call hist_addfld1d(fname=trim(vname),units=trim(vunits), &
avgflag=trim(vavgflag),long_name=trim(vlong), &
@@ -2683,18 +2674,7 @@ subroutine init_history_io(this,bounds_proc)
default=trim(vdefault), &
set_lake=0._r8,set_urb=0._r8)
- case(patch_ground_r8, patch_size_pft_r8)
-
- d_index = fates_hist%dim_kinds(dk_index)%dim2_index
- dim2name = fates_hist%dim_bounds(d_index)%name
- call hist_addfld2d(fname=trim(vname),units=trim(vunits), & ! <--- addfld2d
- type2d=trim(dim2name), & ! <--- type2d
- avgflag=trim(vavgflag),long_name=trim(vlong), &
- ptr_patch=fates_hist%hvars(ivar)%r82d, &
- default=trim(vdefault))
-
-
- case(site_ground_r8, site_size_pft_r8, site_size_r8, site_pft_r8, &
+ case(site_soil_r8, site_size_pft_r8, site_size_r8, site_pft_r8, &
site_age_r8, site_height_r8, site_coage_r8,site_coage_pft_r8, &
site_fuel_r8, site_cwdsc_r8, &
site_can_r8,site_cnlf_r8, site_cnlfpft_r8, site_scag_r8, &
@@ -2991,7 +2971,7 @@ subroutine hlm_bounds_to_fates_bounds(hlm, fates)
use FatesLitterMod, only : ncwd
use EDtypesMod, only : nlevleaf, nclmax
use FatesInterfaceTypesMod, only : numpft_fates => numpft
- use clm_varpar, only : nlevgrnd
+
implicit none
@@ -3003,14 +2983,11 @@ subroutine hlm_bounds_to_fates_bounds(hlm, fates)
fates%cohort_begin = hlm%begcohort
fates%cohort_end = hlm%endcohort
- fates%patch_begin = hlm%begp
- fates%patch_end = hlm%endp
-
fates%column_begin = hlm%begc
fates%column_end = hlm%endc
-
- fates%ground_begin = 1
- fates%ground_end = nlevgrnd
+
+ fates%soil_begin = 1
+ fates%soil_end = nlevsoi
fates%sizepft_class_begin = 1
fates%sizepft_class_end = nlevsclass * numpft_fates
diff --git a/test/tools/README b/test/tools/README
index cb5dcdec34..ed96fb4670 100644
--- a/test/tools/README
+++ b/test/tools/README
@@ -29,6 +29,10 @@ release tests
qcmd -l walltime=10:00:00 -- env CLM_INPUT_TESTS=`pwd`/tests_posttag_nompi_regression \
./test_driver.sh -i >& run.out &
+To run NEON-specific tests, please use login nodes:
+env CLM_INPUT_TESTS=`pwd`/tests_pretag_nompi_neon ./test_driver.sh -i >& run.out &
+
+
Intended for use on NCAR machines cheyenne, geyser (DAV) and hobart.
II. RUNNING test_driver.sh TOOLS TESTING:
diff --git a/test/tools/README.testnames b/test/tools/README.testnames
index 58222d9333..11d9e23d4c 100644
--- a/test/tools/README.testnames
+++ b/test/tools/README.testnames
@@ -42,8 +42,10 @@ m is the resolution
8 -- US-UMB
9 -- 4x5
a -- NEON YELL
-c -- US-UMB with cycling on forcing and transient use-case
-g -- US-UMB with global forcing and grid PFT and soil
+b -- NEON KONA
+d -- region1
+c -- single point from the 0.9x1.25 grid
+g -- unused
y -- 1.9x2.5 with transient 1850-2100 for rcp=2.6 and glacier-MEC on
T -- 1x1_numaIA
Z -- 10x15 with crop on
diff --git a/test/tools/TSMscript_tools.sh b/test/tools/TSMscript_tools.sh
index dbd75f4959..943fec97f2 100755
--- a/test/tools/TSMscript_tools.sh
+++ b/test/tools/TSMscript_tools.sh
@@ -43,9 +43,6 @@ if [ $? -ne 0 ]; then
fi
cd ${rundir}
-# Copy any sample files so can use them
-cp $cfgdir/sample_* $rundir
-
optfile=${3%^*}
cfgfile=${3#*^}
@@ -63,7 +60,7 @@ else
tcbtools="$rundir"
fi
-scopts=`cat ${CLM_SCRIPTDIR}/nl_files/$optfile | sed -e "s|CSMDATA|$CSMDATA|g" | sed -e "s|EXEDIR|$tcbtools|" | sed -e "s|CFGDIR|$cfgdir|g"`
+scopts=`cat ${CLM_SCRIPTDIR}/nl_files/$optfile | sed -e "s|CSMDATA|$CSMDATA|g" | sed -e "s|EXEDIR|$tcbtools|g" | sed -e "s|CFGDIR|$cfgdir|g"`
scopts=`echo $scopts | sed -e "s|CTSM_ROOT|$CTSM_ROOT|g" | sed -e "s|CIME_ROOT|$CIME_ROOT|g"`
echo "TSMscript_tools.sh: running ${cfgdir}/$2 with $scopts; output in ${rundir}/test.log"
diff --git a/test/tools/input_tests_master b/test/tools/input_tests_master
index 51cfa3a2fc..f3e46d50b5 100644
--- a/test/tools/input_tests_master
+++ b/test/tools/input_tests_master
@@ -36,8 +36,16 @@ bliT2 TBLscript_tools.sh mksurfdata_map mksurfdata.pl mksrfdt_1x1_numaIA_crp_SSP
sm0a1 TSMscript_tools.sh site_and_regional run_neon.py run_neon_OSBS
bl0a1 TBLscript_tools.sh site_and_regional run_neon.py run_neon_OSBS
-smba1 TSMscript_tools.sh site_and_regional subset_data.py subset_data_YELL
-blba1 TBLscript_tools.sh site_and_regional subset_data.py subset_data_YELL
+smba1 TSMscript_tools.sh site_and_regional subset_data subset_data_YELL
+blba1 TBLscript_tools.sh site_and_regional subset_data subset_data_YELL
+smbb1 TSMscript_tools.sh site_and_regional subset_data subset_data_KONA
+blbb1 TBLscript_tools.sh site_and_regional subset_data subset_data_KONA
+smb81 TSMscript_tools.sh site_and_regional subset_data subset_data_US-UMB
+blb81 TBLscript_tools.sh site_and_regional subset_data subset_data_US-UMB
+smbc1 TSMscript_tools.sh site_and_regional subset_data subset_data_f09_US_pt
+blbc1 TBLscript_tools.sh site_and_regional subset_data subset_data_f09_US_pt
+smbd1 TSMscript_tools.sh site_and_regional subset_data subset_data_region1
+blbd1 TBLscript_tools.sh site_and_regional subset_data subset_data_region1
smaa2 TSMscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL
blaa2 TBLscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL
@@ -48,12 +56,3 @@ smi59 TSMscript_tools.sh mkmapdata mkmapdata.sh mkmapdata_if10
bli59 TBLscript_tools.sh mkmapdata mkmapdata.sh mkmapdata_if10
smi79 TSMscript_tools.sh mkmapdata mkmapdata.sh mkmapdata_i1x1_brazil
bli79 TBLscript_tools.sh mkmapdata mkmapdata.sh mkmapdata_i1x1_brazil
-
-smiS4 TSMscript_tools.sh site_and_regional getregional_datasets.pl getregional
-bliS4 TBLscript_tools.sh site_and_regional getregional_datasets.pl getregional
-smiS8 TSMscript_tools.sh site_and_regional getregional_datasets.pl getregional_ndep
-bliS8 TBLscript_tools.sh site_and_regional getregional_datasets.pl getregional_ndep
-smiS9 TSMscript_tools.sh site_and_regional getregional_datasets.pl getregional_T62
-bliS9 TBLscript_tools.sh site_and_regional getregional_datasets.pl getregional_T62
-smiS0 TSMscript_tools.sh site_and_regional getregional_datasets.pl getregional_05popd
-bliS0 TBLscript_tools.sh site_and_regional getregional_datasets.pl getregional_05popd
diff --git a/test/tools/nl_files/getregional b/test/tools/nl_files/getregional
deleted file mode 100644
index 5e5d348e39..0000000000
--- a/test/tools/nl_files/getregional
+++ /dev/null
@@ -1 +0,0 @@
--SW 52,190 -NE 73,220 -i sample_inlist -o sample_outlist
diff --git a/test/tools/nl_files/getregional_05popd b/test/tools/nl_files/getregional_05popd
deleted file mode 100644
index 79747ad9cd..0000000000
--- a/test/tools/nl_files/getregional_05popd
+++ /dev/null
@@ -1 +0,0 @@
--SW 52,190 -NE 73,220 -i sample_inlist_0.5popd -o sample_outlist_0.5popd
diff --git a/test/tools/nl_files/getregional_T62 b/test/tools/nl_files/getregional_T62
deleted file mode 100644
index 8288847cf5..0000000000
--- a/test/tools/nl_files/getregional_T62
+++ /dev/null
@@ -1 +0,0 @@
--SW 52,190 -NE 73,220 -i sample_inlist_T62 -o sample_outlist_T62
diff --git a/test/tools/nl_files/getregional_ndep b/test/tools/nl_files/getregional_ndep
deleted file mode 100644
index 125285f690..0000000000
--- a/test/tools/nl_files/getregional_ndep
+++ /dev/null
@@ -1 +0,0 @@
--SW 52,190 -NE 73,220 -i sample_inlist_ndep -o sample_outlist_ndep
diff --git a/test/tools/nl_files/run_neon_OSBS b/test/tools/nl_files/run_neon_OSBS
index b45fc4ffe9..c49fb77783 100644
--- a/test/tools/nl_files/run_neon_OSBS
+++ b/test/tools/nl_files/run_neon_OSBS
@@ -1 +1 @@
-ad --case-root EXEDIR
+--verbose --run-type ad --setup-only
diff --git a/test/tools/nl_files/subset_data_KONA b/test/tools/nl_files/subset_data_KONA
new file mode 100644
index 0000000000..cb743f2b45
--- /dev/null
+++ b/test/tools/nl_files/subset_data_KONA
@@ -0,0 +1 @@
+point --lon 263.38956 --lat 39.1082 --site KONA --dompft 17 19 23 45 --pctpft 28 12 32 28 --crop --create-domain --create-surface --outdir EXEDIR/KONA_user-mod_and_data --user-mods-dir EXEDIR/KONA_user-mod_and_data --verbose
diff --git a/test/tools/nl_files/subset_data_US-UMB b/test/tools/nl_files/subset_data_US-UMB
new file mode 100644
index 0000000000..499b5f53fd
--- /dev/null
+++ b/test/tools/nl_files/subset_data_US-UMB
@@ -0,0 +1 @@
+point --lon 275.28626 --lat 45.5598 --site 1x1_US-UMB --dompft 7 --cap-saturation --uniform-snowpack --create-surface --outdir EXEDIR/US-UMB_user-mod_and_data --user-mods-dir EXEDIR/US-UMB_user-mod_and_data --verbose
diff --git a/test/tools/nl_files/subset_data_YELL b/test/tools/nl_files/subset_data_YELL
index 8e1dcbcbb2..5e142713df 100644
--- a/test/tools/nl_files/subset_data_YELL
+++ b/test/tools/nl_files/subset_data_YELL
@@ -1 +1 @@
-point --lon 249.45804 --lat 44.95597 --site YELL --crop --dompft 1 --outdir EXEDIR/
+point --lon 250.45804 --lat 44.95597 --site YELL --dompft 1 --crop --create-domain --create-surface --outdir EXEDIR/YELL_user-mod_and_data --user-mods-dir EXEDIR/YELL_user-mod_and_data --verbose
diff --git a/test/tools/nl_files/subset_data_f09_US_pt b/test/tools/nl_files/subset_data_f09_US_pt
new file mode 100644
index 0000000000..4acdfeabd4
--- /dev/null
+++ b/test/tools/nl_files/subset_data_f09_US_pt
@@ -0,0 +1 @@
+point --lon 257.5 --lat 43.822 --site 1x1_ --include-nonveg --crop --create-landuse --create-datm --create-user-mods --datm-syr 2000 --datm-eyr 2000 --create-surface --outdir EXEDIR/f09_US_pt_user-mod_and_data --user-mods-dir EXEDIR/f09_US_pt_user-mod_and_data --verbose
diff --git a/test/tools/nl_files/subset_data_region1 b/test/tools/nl_files/subset_data_region1
new file mode 100644
index 0000000000..c1c5607239
--- /dev/null
+++ b/test/tools/nl_files/subset_data_region1
@@ -0,0 +1 @@
+region --lat1 -40 --lat2 15 --lon1 275 --lon2 330 --create-domain --create-surface --create-landuse --verbose --overwrite --reg test1
diff --git a/test/tools/tests_posttag_nompi_regression b/test/tools/tests_posttag_nompi_regression
index 5b5d76fd60..1395aebe11 100644
--- a/test/tools/tests_posttag_nompi_regression
+++ b/test/tools/tests_posttag_nompi_regression
@@ -9,8 +9,3 @@ smi74 bli74
smi78 bli78
smiT4 bliT4
smiT2 bliT2
-smiS4 bliS4
-smiS8 bliS8
-smiS9 bliS9
-smiS0 bliS0
-smiS0 bliS0
diff --git a/test/tools/tests_pretag_cheyenne_nompi b/test/tools/tests_pretag_cheyenne_nompi
index fec9d08448..19e96594bf 100644
--- a/test/tools/tests_pretag_cheyenne_nompi
+++ b/test/tools/tests_pretag_cheyenne_nompi
@@ -1,9 +1,8 @@
smi79 bli79
smc#4 blc#4
smg54 blg54
-sm0a1 bl0a1
-smaa2 blaa2
smba1 blba1
+smbd1 blbd1
smi04 bli04
smi24 bli24
smi53 bli53
@@ -11,7 +10,6 @@ smi64 bli64
smi54 bli54
smi57 bli57
smi58 bli58
-smiS4 bliS4
smi74 bli74
smiT4 bliT4
smiT2 bliT2
diff --git a/test/tools/tests_pretag_nompi_neon b/test/tools/tests_pretag_nompi_neon
new file mode 100644
index 0000000000..43167e71c0
--- /dev/null
+++ b/test/tools/tests_pretag_nompi_neon
@@ -0,0 +1,7 @@
+sm0a1 bl0a1
+smaa2 blaa2
+smba1 blba1
+smbb1 blbb1
+smb81 blb81
+smbc1 blbc1
+smbd1 blbd1
diff --git a/tools/modify_fsurdat/fsurdat_modifier b/tools/modify_fsurdat/fsurdat_modifier
index fdf3d48756..8c2031b548 100755
--- a/tools/modify_fsurdat/fsurdat_modifier
+++ b/tools/modify_fsurdat/fsurdat_modifier
@@ -37,6 +37,7 @@ ncar_pylib
contains all the arguments needed by the script.
3) Run the script ./fsurdat_modifier pointing to the copied/modified .cfg file,
e.g. modify_users_copy.cfg
+4) Use the --verbose option to see progress output on your screen
Example
-------
diff --git a/tools/modify_fsurdat/modify_template.cfg b/tools/modify_fsurdat/modify_template.cfg
index 56e8221635..fa134d34e3 100644
--- a/tools/modify_fsurdat/modify_template.cfg
+++ b/tools/modify_fsurdat/modify_template.cfg
@@ -56,13 +56,16 @@ lnd_lon_2 = 360
# user-defined mask in a file, as alternative to setting lat/lon values
landmask_file = UNSET
-# Non-crop PFT to be set to 100% according to user-defined mask.
-# If idealized = True and dom_nat_pft = UNSET, the latter defaults to 0
-# (bare soil). Valid values 0 to 14 (int).
-dom_nat_pft = UNSET
+# PFT/CFT to be set to 100% according to user-defined mask.
+# If idealized = True and dom_plant = UNSET, the latter defaults to 0
+# (bare soil). Valid values range from 0 to a max value (int) that one can
+# obtain from the fsurdat_in file using ncdump (or the user's preferred method).
+# The max valid value will equal (lsmpft - 1) and will also equal the last
+# value of cft(cft).
+dom_plant = UNSET
-# LAI, SAI, HEIGHT_TOP, and HEIGHT_BOT values by month for dom_nat_pft
-# If dom_nat_pft = 0, the next four default to 0 (space-delimited list
+# LAI, SAI, HEIGHT_TOP, and HEIGHT_BOT values by month for dom_plant
+# If dom_plant = 0, the next four default to 0 (space-delimited list
# of floats without brackets).
lai = UNSET
sai = UNSET
diff --git a/tools/site_and_regional/README b/tools/site_and_regional/README
index ca7507d7d6..930a07a7ac 100644
--- a/tools/site_and_regional/README
+++ b/tools/site_and_regional/README
@@ -10,14 +10,31 @@ ncar_pylib
Brief description of scripts:
-subset_data.py
- create regional domain, surface data, and rtm directional files by
- extracting data from global datasets
+subset_data
+ This script extracts domain files, surface datasets, and DATM files
+ at either a single point or over a region from the global datasets.
+ To extract domain files, surface datasets, and DATM files at a single point, use:
+ ./subset_data point
+
+ To extract domain files, surface datasets, and DATM files for a region, use:
+ ./subset_data region
modify_singlept_site_neon.py
- After running subset_data.py overwrite some fields with site-specific
- data for neon sites
+ After running subset_data, overwrite some fields with site-specific
+ data for NEON sites.
+
+run_neon.py
+ Wrapper script for running CTSM simulations for one or more
+ NEON sites for spin-up or transient run types.
+
+neon_surf_wrapper.py
+ Wrapper script that runs subset_data to extract data for all NEON points and then
+ uses modify_singlept_site_neon.py to update site-specific fields.
+ This code uses neon_sites_dompft.csv to determine --dompft (dominant pft types) values.
+
+neon_s3_upload
+ Script to rename and upload NEON site finidat files to the NEON S3 bucket
+ for use in transient startup cases.
DEPRECATED SCRIPTS:
@@ -27,20 +44,8 @@ mknoocnmap.pl
Script to create unity mapping dataset for single-point
or regional studies over land-only (no ocean).
-getregional_datasets.pl
- Extract out regional datasets from global ones and put files in a location that
- can be used by build-namelist.
-
NCL Scripts available:
mkunitymap.ncl
NCL script to create a unity map -- ran by above script (mknoocnmap.pl)
-getregional_datasets.ncl
- NCL script to extract out regional datasets.
-
-Input datafiles:
-
-sample_infile* --- Sample datafiles with list of files for getregional_datasets.pl to operate on
-sample_outfile* -- Sample datafiles with list of files for getregional_datasets.pl to create
-
diff --git a/tools/site_and_regional/README.getregional b/tools/site_and_regional/README.getregional
deleted file mode 100644
index 56c1d0834d..0000000000
--- a/tools/site_and_regional/README.getregional
+++ /dev/null
@@ -1,35 +0,0 @@
-$CTSMROOT/tools/site_and_regional/README.getregional Erik Kluzek
- 06/08/2018
-
-Information on the getregional_datasets script.
-
-The getregional_datasets.pl script operates on global datasets and
-extracts out a regional box (or single point) within it.
-
-
-QUICKSTART:
-
-
-Here is how you would use the script to run a setup a simple case.
-
-1.) Create list of input global files you want to extract from.
-
-A sample file is: sample_inlist
-
-2.) Create list of regional files that will be created.
-
-A sample file is: sample_outlist
-
-3.) Run getregional
-
-set DIR=`pwd`
-./getregional_datasets.pl -ne 74,221 -sw 51,189 -i sample_inlist -o sample_outlist
-
-4.) Make sure the user_nl_clm and xmlchange_cmnds files are correct.
-
-getregional will create a user_nl_clm file and a xmlchange_cmnds script to set
-needed env_run settings.
-
-3.) Create your case using the user_mods_dir option and CLM_USRDAT resolution
-
-./create_newcase --res CLM_USRDAT --user_mods_dir $DIR --case testAlaska --compset I2000Clm50SpGs
diff --git a/tools/site_and_regional/default_data.cfg b/tools/site_and_regional/default_data.cfg
new file mode 100644
index 0000000000..f689c99044
--- /dev/null
+++ b/tools/site_and_regional/default_data.cfg
@@ -0,0 +1,28 @@
+[main]
+clmforcingindir = /glade/p/cesmdata/inputdata
+
+[datm_gswp3]
+dir = /glade/p/cgd/tss/CTSM_datm_forcing_data/atm_forcing.datm7.GSWP3.0.5d.v1.c170516
+domain = domain.lnd.360x720_gswp3.0v1.c170606.nc
+solardir = Solar
+precdir = Precip
+tpqwdir = TPHWL
+solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr.
+prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec.
+tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL.
+solarname = CLMGSWP3v1.Solar
+precname = CLMGSWP3v1.Precip
+tpqwname = CLMGSWP3v1.TPQW
+
+[surfdat]
+dir = lnd/clm2/surfdata_map/release-clm5.0.18
+surfdat_16pft = surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc
+surfdat_78pft = surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc
+
+[landuse]
+dir = lnd/clm2/surfdata_map/release-clm5.0.18
+landuse_16pft = landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc
+landuse_78pft = landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc
+
+[domain]
+file = share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc
diff --git a/tools/site_and_regional/getregional_datasets.ncl b/tools/site_and_regional/getregional_datasets.ncl
deleted file mode 100644
index a6da88c67a..0000000000
--- a/tools/site_and_regional/getregional_datasets.ncl
+++ /dev/null
@@ -1,268 +0,0 @@
-;
-; Extract out regional datasets needed to run clm from the global datasets.
-; NOTE: Requires at least NCL version 5.1.0 or later...
-;
-; Erik Kluzek
-; Aug/28/2009
-;
-load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl";
-
-procedure getfilecoord_namenlen( filenames[*]:string, dimnames[*]:string, dimlens[*]:integer, nlen:integer, name:string )
-;
-; get the name and size of either the latitude or longitude
-;
- local d, l
-begin
- if ( name .eq. "" )then
- do d = 0, dimsizes(filenames)-1
- if ( any(dimnames .eq. filenames(d) ) )then
- name = filenames(d)
- ; Get length of this dimension
- do l = 0, dimsizes(dimnames)-1
- if ( dimnames(l) .eq. name )then
- nlen = dimlens(l)
- end if
- end do
- end if
- end do
- end if
-end
-
-begin
- ; ===========================================================================================================
- ;
- ; IMPORTANT NOTE: EDIT THE FOLLOWING TO CUSTOMIZE or use ENV VARIABLE SETTINGS
- ; Edit the following as needed to interpolate to a new resolution.
- ;
- ; Input resolution and position
- ;
- latS = stringtodouble( getenv("S_LAT") ); ; Get south latitude from env variable
- latN = stringtodouble( getenv("N_LAT") ); ; Get north latitude from env variable
- lonE = stringtodouble( getenv("E_LON") ); ; Get east longitude from env variable
- lonW = stringtodouble( getenv("W_LON") ); ; Get west longitude from env variable
- debug_str = getenv("DEBUG"); ; Don't run just -- debug
- print_str = getenv("PRINT"); ; Do Extra printing for debugging
- gridfile = getenv("GRIDFILE"); ; Input global grid file
- nfiles = stringtointeger( getenv("NFILES") ); ; number of files to read in file lists
- filelistfil = getenv("INFILELIST"); ; filename of list of global files to work on
- regfilelistfil = getenv("OUTFILELIST"); ; filename of list of regional eiles to create
-
- if ( ismissing(nfiles) )then
- print( "NFILES is missing -- need to provide the number of files to process" );
- status_exit( -1 )
- end if
- if ( ismissing(filelistfil) .or. ismissing(regfilelistfil) )then
- print( "INFILELIST or OUTFILELIST is missing -- need to provide both" );
- status_exit( -1 )
- end if
- if ( ismissing(latS) )then
- latS = 52.0d00;
- end if
- if ( ismissing(latN) )then
- latN = 73.0d00;
- end if
- if ( ismissing(lonW) )then
- lonW = 190.0d00;
- end if
- if ( ismissing(lonE) )then
- lonE = 220.0d00;
- end if
- if ( ismissing(print_str) )then
- printn = False;
- else
- if ( print_str .eq. "TRUE" )then
- printn = True;
- else
- printn = False;
- end if
- end if
- if ( ismissing(debug_str) )then
- debug = False;
- else
- if ( debug_str .eq. "TRUE" )then
- print( "DEBUG is TRUE do extra printing AND do NOT execute -- just print what WOULD happen" );
- debug = True;
- printn = True;
- else
- debug = False;
- end if
- end if
- print( "Extract out regional datasets from global datasets" );
- if ( printn .eq. True )then
- print( "Regional: Latitude="+latS+"-"+latN+" Longitude="+lonW+"-"+lonE );
- end if
-
- ;
- ; Setup the namelist query script
- ;
- ldate = systemfunc( "date" );
- clmroot = getenv("CLM_ROOT");
-
- ;
- ; list of latitude and longitude names
- ;
- filelatnames = (/ "lsmlat", "lat", "nj" /);
- filelonnames = (/ "lsmlon", "lon", "ni" /);
-
- ;
- ; Open file
- ;
- if ( systemfunc("test -f "+gridfile+"; echo $?" ) .ne. 0 )then
- print( "Input gridfile does not exist or not found: "+gridfile );
- status_exit( -1 )
- end if
- if ( printn .eq. True )then
- print( "gridfile:"+gridfile );
- end if
- ncg = addfile( gridfile, "r" );
- ;
- ; Get the names for latitude/longitude on the grid file
- ;
- varnames = getfilevarnames( ncg );
- gridlonnm = ""
- gridlatnm = ""
- glat = 0
- glon = 0
- varlens = new( dimsizes(varnames), "integer" );
- getfilecoord_namenlen( (/ "yc", "LATIXY"/), varnames, varlens, glat, gridlatnm );
- getfilecoord_namenlen( (/ "xc", "LONGXY"/), varnames, varlens, glon, gridlonnm );
- delete( varnames );
- delete( varlens );
- if ( gridlatnm .eq. "" )then
- print( "Could not find a recognizable latitude dimension name" )
- status_exit(-1);
- end if
- if ( printn .eq. True )then
- print( "gridlatname = "+gridlatnm )
- print( "gridlonname = "+gridlonnm )
- end if
-
- gridlon = ncg->$gridlonnm$;
- gridlon = where( gridlon < 0.0, 360.0 + gridlon, gridlon );
-
- indx = region_ind ( (/ncg->$gridlatnm$/), (/gridlon/), latS, latN, lonW, lonE );
- ; Indexes into indices
- ilat0 = 0;
- ilatN = 1;
- ilon0 = 2;
- ilonN = 3;
-
- latdim = dimsizes(ncg->$gridlatnm$(:,0))
- londim = dimsizes(gridlon(0,:))
- if ( any( ismissing(indx)) )then
- print( "Indices:"+indx );
- print( "Missing indices found" );
- print( "nlat: "+latdim );
- print( "nlon: "+londim );
- print( "yc: "+ncg->$gridlatnm$(:,0) );
- print( "xc: "+gridlon(0,:) );
- status_exit(-1);
- end if
-
- if ( debug .eq. True )then
- print( "Indices:"+indx );
- end if
- if ( printn .eq. True )then
- print( "Full grid size: nlat = "+latdim+" nlon = "+londim )
- loclatdim = indx(ilatN) - indx(ilat0) + 1;
- loclondim = indx(ilonN) - indx(ilon0) + 1;
- print( "Grid size:"+loclatdim+"x"+loclondim );
- LOLAT = ncg->$gridlatnm$(indx(ilat0),indx(ilon0));
- HILAT = ncg->$gridlatnm$(indx(ilatN),indx(ilonN));
- print( "Actual grid span: Latitude="+LOLAT+"-"+HILAT );
- LOLON = gridlon(indx(ilat0),indx(ilon0));
- HILON = gridlon(indx(ilatN),indx(ilonN));
- print( "Actual grid span: Longitude="+LOLON+"-"+HILON );
- end if
-
- ;
- ; Read in the list of files
- ;
- filelist = asciiread(filelistfil(0), (/ nfiles /), "string");
- regfilelist = asciiread(regfilelistfil(0), (/ nfiles /), "string");
- ;
- ; Loop over each of the files to process...
- ;
- do i = 0, nfiles-1
- ;
- ; Get the filename of the input global file and the output regional filename
- ;
- globalfile = filelist(i)
- if ( systemfunc("test -f "+globalfile+"; echo $?" ) .ne. 0 )then
- print( "Input global "+globalfile+" file does not exist or not found: "+globalfile );
- status_exit(-1);
- end if
- if ( debug .eq. True )then
- print( "Process file: "+globalfile );
- end if
- regfile = regfilelist(i)
- if ( ismissing(regfile) )then
- print( "Output regional filename was NOT found: "+regfile );
- status_exit(-1);
- end if
-
- nc = addfile( globalfile, "r" );
- varnames = getfilevarnames( nc );
- filelonnm = ""
- filelatnm = ""
- nlat = 0
- nlon = 0
- do v = 0, dimsizes(varnames)-1
- dimnames = getfilevardims( nc, varnames(v) );
- dimlens = getfilevardimsizes( nc, varnames(v) );
- getfilecoord_namenlen( filelatnames, dimnames, dimlens, nlat, filelatnm );
- getfilecoord_namenlen( filelonnames, dimnames, dimlens, nlon, filelonnm );
- delete( dimnames );
- delete( dimlens );
- end do
- if ( filelatnm .eq. "" )then
- print( "Could not find a recognizable latitude dimension name" )
- status_exit(-1);
- end if
- if ( printn .eq. True )then
- print( "nlat = "+nlat+" nlon = "+nlon )
- end if
- ;
- ; Check to make sure number of latitudes and longitudes are the same as on the domain file
- ;
- if ( (latdim .ne. nlat) .or. (londim .ne. nlon) )then
- print( "Latitude or longitude dimensions do NOT match the grid file for file: "+globalfile );
- status_exit(-1);
- end if
- ;
- ; Run ncks on it over the region of interest
- ;
- do v = 0, dimsizes(varnames)-1
- cmd = "ncks -O -d "+filelatnm+","+indx(ilat0)+","+indx(ilatN)+" -d "+filelonnm+","+indx(ilon0)+","+indx(ilonN);
- cmd = cmd + " -v " + varnames(v) + " " + globalfile + " "+regfile+"_VAR"+varnames(v)+".nc"
- print( "Execute:"+cmd );
- if ( debug .eq. False )then
- if ( systemfunc( cmd+"; echo $?" ) .ne. 0 )then
-            print( "Command did not complete successfully: "+cmd );
- status_exit( -1 )
- end if
- end if
- cmd = "ncks -A "+regfile+"_VAR"+varnames(v)+".nc "+regfile
- print( "Execute:"+cmd );
- if ( debug .eq. False )then
- if ( systemfunc( cmd+"; echo $?" ) .ne. 0 )then
-              print( "Command did not complete successfully: "+cmd );
- status_exit( -1 )
- end if
- system( "/bin/rm "+regfile+"_VAR"+varnames(v)+".nc" )
- end if
- end do
- delete( varnames );
- if ( debug .eq. False )then
- ;
- ; Open up resultant file for writing
- ;
- nco = addfile( regfile, "w" );
- nco@history = nco@history + ":"+ldate + ": ";
- end if
- end do
-
- print( "================================================================================================" );
- print( "Successfully created regional datasets from global datasets" );
-
-end
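For readers tracking what was removed: the NCL tool above found the index bounds of a latitude/longitude box on a 2-D CLM grid and then cut every file in its list down to that box with ncks. A minimal Python sketch of the same idea follows; the LATIXY/LONGXY variable names match the grid files the tool searched for, while the lsmlat/lsmlon dimension names and file names are illustrative assumptions (the supported replacement is the subset_data tooling added later in this change).

import numpy as np
import xarray as xr

def subset_region(infile, outfile, lat_s, lat_n, lon_w, lon_e):
    # Sketch only: dimension names (lsmlat/lsmlon) and file names are assumptions.
    ds = xr.open_dataset(infile)
    lat = ds["LATIXY"].values            # 2-D latitude, as on CLM grid files
    lon = ds["LONGXY"].values % 360.0    # wrap negative longitudes to 0..360, as the NCL code did
    box = (lat >= lat_s) & (lat <= lat_n) & (lon >= lon_w) & (lon <= lon_e)
    jj = np.where(box.any(axis=1))[0]    # latitude rows touching the box
    ii = np.where(box.any(axis=0))[0]    # longitude columns touching the box
    ds.isel(lsmlat=slice(jj[0], jj[-1] + 1),
            lsmlon=slice(ii[0], ii[-1] + 1)).to_netcdf(outfile)

subset_region("surfdata_global.nc", "surfdata_region.nc", 52.0, 73.0, 190.0, 220.0)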
diff --git a/tools/site_and_regional/getregional_datasets.pl b/tools/site_and_regional/getregional_datasets.pl
deleted file mode 100755
index 5fee1a1493..0000000000
--- a/tools/site_and_regional/getregional_datasets.pl
+++ /dev/null
@@ -1,375 +0,0 @@
-#!/usr/bin/env perl
-#=======================================================================
-#
-# Extract regional datasets from the global datasets.
-#
-# Usage:
-#
-# getregional_datasets.pl
-#
-# Erik Kluzek
-# Aug/28/2009
-#
-#=======================================================================
-
-use Cwd;
-use strict;
-#use diagnostics;
-use English;
-use Getopt::Long;
-use IO::File;
-
-#-----------------------------------------------------------------------------------------------
-# Set the directory that contains this script. If the command was issued using a
-# relative or absolute path, that path is in $ProgDir. Otherwise assume the
-# command was issued from the current working directory.
-
-(my $ProgName = $0) =~ s!(.*)/!!; # name of this script
-my $ProgDir = $1; # name of directory containing this script -- may be a
- # relative or absolute path, or null if the script is in
- # the user's PATH
-my $cmdline = "@ARGV"; # Command line arguments to script
-my $cwd = getcwd(); # current working directory
-my $scrdir; # absolute pathname of directory that contains this script
-my $nm = "$ProgName::"; # name to use if script dies
-if ($ProgDir) {
- $scrdir = absolute_path($ProgDir);
-} else {
- $scrdir = $cwd;
-}
-
-my $gridfilename = "fatmlndfrc";
-
-#-----------------------------------------------------------------------------------------------
-
-sub usage {
-    die <<"EOF";
-** Usage: $ProgName -sw|--SW_corner <lat,lon> -ne|--NE_corner <lat,lon> -i|--infilelist <file> -o|--outfilelist <file> [-d] [-v] [-h] **
-EOF
-}
-
-#-----------------------------------------------------------------------------------------------
-
-sub get_latlon {
-#
-# Return the latitude and longitude of a "lat,lon" string and validate the range.
-#
-    my ($string, $desc) = @_;
-
-    my ($lat, $lon) = split( /,/, $string );
-    if ( ($lat < -90.0) || ($lat > 90.0) ) {
-       die <<"EOF";
-** $ProgName - Bad value for latitude (=$lat) for $desc **
-EOF
-    }
-    if ( ($lon < 0.) || ($lon > 360.0) ) {
-       die <<"EOF";
-** $ProgName - Bad value for longitude (=$lon) for $desc **
-EOF
-    }
-    return( $lat, $lon );
-
-}
-
-#-----------------------------------------------------------------------------------------------
-
-# Process command-line options.
-
-my %opts = (
- SW_corner => undef,
- NE_corner => undef,
- infilelist => undef,
- outfilelist => undef,
- help => 0,
- verbose => 0,
- debug => 0,
- );
-GetOptions(
- "sw|SW_corner=s" => \$opts{'SW_corner'},
- "ne|NE_corner=s" => \$opts{'NE_corner'},
- "i|infilelist=s" => \$opts{'infilelist'},
- "o|outfilelist=s" => \$opts{'outfilelist'},
- "h|help" => \$opts{'help'},
- "d|debug" => \$opts{'debug'},
- "v|verbose" => \$opts{'verbose'},
-) or usage();
-
-# Give usage message.
-usage() if $opts{'help'};
-
-# Check for unparsed arguments
-if (@ARGV) {
- print "ERROR: unrecognized arguments: @ARGV\n";
- usage();
-}
-
-if ( ! defined($opts{'infilelist'}) || ! defined($opts{'outfilelist'}) ) {
- print "ERROR: MUST set both infilelist and outfilelist\n";
- usage();
-}
-if ( ! defined($opts{'SW_corner'}) || ! defined($opts{'NE_corner'}) ) {
- print "ERROR: MUST set both SW_corner and NE_corner\n";
- usage();
-}
-
-my ($S_lat,$W_lon) = get_latlon( $opts{'SW_corner'}, "SW" );
-my ($N_lat,$E_lon) = get_latlon( $opts{'NE_corner'}, "NE" );
-
-if ( $N_lat <= $S_lat ) {
- print "ERROR: NE corner latitude less than or equal to SW corner latitude\n";
- usage();
-}
-if ( $E_lon <= $W_lon ) {
- print "ERROR: NE corner longitude less than or equal to SW corner longitude\n";
- usage();
-}
-
-#-----------------------------------------------------------------------------------------------
-my $debug;
-if ( $opts{'debug'} ) {
- $debug = "DEBUG=TRUE";
-}
-my $print;
-if ( $opts{'verbose'} ) {
- $print = "PRINT=TRUE";
-}
-
-my %infiles = parse_filelist( $opts{'infilelist'} );
-my %outfiles = parse_filelist( $opts{'outfilelist'} );
-
-(my $GRIDFILE, my $NFILES, my $INFILES, my $OUTFILES) = get_filelists( \%infiles, \%outfiles );
-
-write_usermods( \%outfiles );
-
-my $cmd = "env S_LAT=$S_lat W_LON=$W_lon N_LAT=$N_lat E_LON=$E_lon " .
- "GRIDFILE=$GRIDFILE NFILES=$NFILES OUTFILELIST=$OUTFILES INFILELIST=$INFILES " .
- "$debug $print ncl $scrdir/getregional_datasets.ncl";
-
-print "Execute: $cmd\n";
-system( $cmd );
-system( "/bin/rm $INFILES $OUTFILES" );
-
-#-------------------------------------------------------------------------------
-
-sub parse_filelist {
-#
-# Parse a list of files (in "filename = 'filepath'" format) into a hash
-#
- my $file = shift;
-
- # check that the file exists
- (-f $file) or die "$nm: failed to find filelist file $file";
- my $fh = IO::File->new($file, '<') or die "$nm: can't open file: $file\n";
-
- my %files = ( );
- my $valstring1 = '\'[^\']*\'';
- my $valstring2 = '"[^"]*"';
- while( my $line = <$fh> ) {
- if ( $line =~ m/^\s*(\S+)\s*=\s*($valstring1|$valstring2)$/ ) {
- my $var = $1;
- my $string = $2;
- $string =~ s/'|"//g;
- if ( exists($files{$var}) ) {
- die "$nm: variable listed twice in file ($file): $var\n";
- }
- $files{$var} = $string;
- # Ignore empty lines or comments
- } elsif ( ($line =~ m/^\s*$/) || ($line =~ m/^\s*!/) ) {
- # ignore empty lines or comments
- } else {
- die "$nm: unexpected line in $file: $line\n";
- }
- }
- $fh->close;
-
- return( %files );
-}
-
-#-------------------------------------------------------------------------------
-
-sub get_filelists {
-#
-# Make sure file hashes compare correctly, and if so return in and out lists
-# on files
-#
- my $infiles_ref = shift;
- my $outfiles_ref = shift;
-
- my @infiles = sort( keys(%$infiles_ref ) );
- my @outfiles = sort( keys(%$outfiles_ref) );
-
- if ( $#infiles != $#outfiles ) {
- die "$nm: number of infiles is different from outfiles\n";
- }
- if ( "@infiles" ne "@outfiles" ) {
- die "$nm: list of infiles is different from outfiles list\n";
- }
- my $infilelist = "infilelist_getregional_datasets___tmp.lst";
- my $outfilelist = "outfilelist_getregional_datasets___tmp.lst";
- my $fhin = IO::File->new($infilelist, '>') or die "$nm: can't open file: $infilelist\n";
- my $fhout = IO::File->new($outfilelist, '>') or die "$nm: can't open file: $outfilelist\n";
-
- my $nfiles = 0;
- foreach my $file ( @infiles ) {
- my $infile = $$infiles_ref{$file};
- if ( ! -f "$infile" ) {
- die "$nm: infile ($file) $infile does NOT exist!\n";
- }
- print $fhin "$infile\n";
- my $outfile = $$outfiles_ref{$file};
- if ( -f "$outfile" ) {
- die "$nm: outfile ($file) $outfile already exists, delete it if you want to overwrite!\n";
- }
- print $fhout "$outfile\n";
- $nfiles++;
- }
- $fhin->close();
- $fhout->close();
- my $var = $gridfilename;
- my $gridfile = "";
- if ( exists($$infiles_ref{$var}) ) {
- $gridfile = $$infiles_ref{$var};
- } else {
- die "$nm: the grid file ($var) is required to be on the lists!\n";
- }
-
- return( $gridfile, $nfiles, $infilelist, $outfilelist );
-}
-
-#-------------------------------------------------------------------------------
-
-sub write_usermods {
-#
-# Write the user_nl_clm and xmlchng_cmnds files out
-# These can be used to setup a case after getregional_datasets is run.
-#
- my $outfiles_ref = shift;
-
- my $cwd = getcwd(); # current working directory
-
- #
- # Write out the user_nl_clm file
- #
- my $usrnlfile = "user_nl_clm";
- my $fh = IO::File->new($usrnlfile, '>') or die "$nm: can't open file: $usrnlfile\n";
-
- my $outgridfile = undef;
- foreach my $file ( sort(keys(%$outfiles_ref)) ) {
- my $filepath = $$outfiles_ref{$file};
- # Add current directory on front of path if not an absolute path in filepath
- if ( $filepath !~ m/^\// ) {
- $filepath = "$cwd/$filepath";
- }
- # Write all filenames out besides the gridfilename
- if ( $file ne $gridfilename ) {
- print $fh "$file = '$filepath'\n";
- } else {
- $outgridfile = $filepath;
- }
- }
- $fh->close();
- #
- # Write out the xmlchnge_cmnds file
- #
- (my $filename = $outgridfile)=~ s!(.*)/!!;
- my $filedir = $1;
- my $cmndsfile = "xmlchange_cmnds";
- my $fh = IO::File->new($cmndsfile, '>') or die "$nm: can't open file: $cmndsfile\n";
- print $fh "./xmlchange ATM_DOMAIN_PATH=$filedir\n";
- print $fh "./xmlchange LND_DOMAIN_PATH=$filedir\n";
- print $fh "./xmlchange ATM_DOMAIN_FILE=$filename\n";
- print $fh "./xmlchange LND_DOMAIN_FILE=$filename\n";
- $fh->close();
-}
-
-#-------------------------------------------------------------------------------
-
-sub absolute_path {
-#
-# Convert a pathname into an absolute pathname, expanding any . or .. characters.
-# Assumes pathnames refer to a local filesystem.
-# Assumes the directory separator is "/".
-#
- my $path = shift;
- my $cwd = getcwd(); # current working directory
- my $abspath; # resulting absolute pathname
-
-# Strip off any leading or trailing whitespace. (This pattern won't match if
-# there's embedded whitespace.
- $path =~ s!^\s*(\S*)\s*$!$1!;
-
-# Convert relative to absolute path.
-
- if ($path =~ m!^\.$!) { # path is "."
- return $cwd;
- } elsif ($path =~ m!^\./!) { # path starts with "./"
- $path =~ s!^\.!$cwd!;
- } elsif ($path =~ m!^\.\.$!) { # path is ".."
- $path = "$cwd/..";
- } elsif ($path =~ m!^\.\./!) { # path starts with "../"
- $path = "$cwd/$path";
- } elsif ($path =~ m!^[^/]!) { # path starts with non-slash character
- $path = "$cwd/$path";
- }
-
- my ($dir, @dirs2);
- my @dirs = split "/", $path, -1; # The -1 prevents split from stripping trailing nulls
- # This enables correct processing of the input "/".
-
- # Remove any "" that are not leading.
- for (my $i=0; $i<=$#dirs; ++$i) {
- if ($i == 0 or $dirs[$i] ne "") {
- push @dirs2, $dirs[$i];
- }
- }
- @dirs = ();
-
- # Remove any "."
- foreach $dir (@dirs2) {
- unless ($dir eq ".") {
- push @dirs, $dir;
- }
- }
- @dirs2 = ();
-
- # Remove the "subdir/.." parts.
- foreach $dir (@dirs) {
- if ( $dir !~ /^\.\.$/ ) {
- push @dirs2, $dir;
- } else {
- pop @dirs2; # remove previous dir when current dir is ..
- }
- }
- if ($#dirs2 == 0 and $dirs2[0] eq "") { return "/"; }
- $abspath = join '/', @dirs2;
- return( $abspath );
-}
-
-#-------------------------------------------------------------------------------
-
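The deleted Perl driver above read two filelists in a simple `variable = 'filepath'` format (the sample_inlist/sample_outlist files removed below show concrete examples). For reference, a hedged Python equivalent of its parse_filelist helper, matching the behavior visible above (lines starting with "!" are comments, duplicate variables are an error):

import re

_LINE_RE = re.compile(r"^\s*(\S+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")\s*$")

def parse_filelist(path):
    # Sketch of the deleted Perl helper's behavior, not code from this repo.
    files = {}
    with open(path) as fh:
        for line in fh:
            if not line.strip() or line.lstrip().startswith("!"):
                continue                      # blank lines and "!" comments are ignored
            m = _LINE_RE.match(line)
            if m is None:
                raise ValueError("unexpected line in {}: {!r}".format(path, line))
            var = m.group(1)
            if var in files:
                raise ValueError("variable listed twice in {}: {}".format(path, var))
            files[var] = m.group(2) if m.group(2) is not None else m.group(3)
    return files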
diff --git a/tools/site_and_regional/modify_singlept_site_neon.py b/tools/site_and_regional/modify_singlept_site_neon.py
index d3db55126b..1928653dcf 100755
--- a/tools/site_and_regional/modify_singlept_site_neon.py
+++ b/tools/site_and_regional/modify_singlept_site_neon.py
@@ -251,9 +251,9 @@ def find_surffile(surf_dir, site_name):
"""
# sf_name = "surfdata_hist_16pfts_Irrig_CMIP6_simyr2000_"+site_name+"*.nc"
- sf_name = "surfdata_hist_78pfts_CMIP6_simyr2000_" + site_name + "*.nc"
- # surf_file = glob.glob(os.path.join(surf_dir,sf_name))
- surf_file = glob.glob(surf_dir + "/" + sf_name)
+ sf_name = "surfdata_*hist_78pfts_CMIP6_simyr2000_" + site_name + "*.nc"
+    print(os.path.join(surf_dir, sf_name))
+    surf_file = sorted(glob.glob(os.path.join(surf_dir, sf_name)))
if len(surf_file) > 1:
print("The following files found :", *surf_file, sep="\n- ")
@@ -457,16 +457,21 @@ def download_file(url, fname):
fname (str) :
file name to save the downloaded file.
"""
- response = requests.get(url)
+ try:
+ response = requests.get(url)
- with open(fname, "wb") as f:
- f.write(response.content)
+ with open(fname, "wb") as f:
+ f.write(response.content)
- # -- Check if download status_code
- if response.status_code == 200:
- print("Download finished successfully for", fname, ".")
- elif response.status_code == 404:
- print("File " + fname + "was not available on the neon server:" + url)
+ # -- Check if download status_code
+ if response.status_code == 200:
+ print("Download finished successfully for", fname, ".")
+ elif response.status_code == 404:
+            print("File " + fname + " was not available on the neon server: " + url)
+ except Exception as err:
+        print('The server could not fulfill the request.')
+        print('Something went wrong in downloading', fname)
+        print('Error:', err)
def fill_interpolate(f2, var, method):
@@ -675,6 +680,7 @@ def main():
print("Updated : ", f2.PCT_CROP.values)
print("Updating PCT_NAT_PFT")
+ #print (f2.PCT_NAT_PFT)
print(f2.PCT_NAT_PFT.values[0])
f2.PCT_NAT_PFT.values[0] = [[100.0]]
print(f2.PCT_NAT_PFT[0].values)
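On the download_file change above: a bare exception from requests carries no `.code` attribute, so the handler reports the exception itself. A slightly fuller sketch of the same pattern (an illustration, not part of this diff) narrows the except clause and lets raise_for_status() turn HTTP errors such as 404 into exceptions:

import requests

def download_file(url, fname):
    try:
        response = requests.get(url, timeout=60)
        response.raise_for_status()      # raises on 404 and other HTTP errors
        with open(fname, "wb") as f:
            f.write(response.content)
        print("Download finished successfully for", fname)
    except requests.RequestException as err:
        print("Something went wrong in downloading", fname)
        print("Error:", err)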
diff --git a/tools/site_and_regional/neon_surf_wrapper.py b/tools/site_and_regional/neon_surf_wrapper.py
index e5b37a74e3..df58d3ab36 100755
--- a/tools/site_and_regional/neon_surf_wrapper.py
+++ b/tools/site_and_regional/neon_surf_wrapper.py
@@ -97,10 +97,12 @@ def main():
site = row['Site']
pft = row['pft']
print ("Now processing site :", site)
- command = ['./subset_data.py','point','--lat',str(lat),'--lon',str(lon),'--site',site,'--dompft',str(pft),'--crop']
+ command = ['./subset_data','point','--lat',str(lat),'--lon',str(lon),'--site',site,'--dompft',str(pft),'--crop',
+ '--create-surface','--uniform-snowpack','--cap-saturation','--verbose']
execute(command)
- command = ['./modify_singlept_site_neon.py','--neon_site',site]
+ command = ['./modify_singlept_site_neon.py','--neon_site',site, '--surf_dir',
+ 'subset_data_single_point']
execute(command)
if __name__ == "__main__":
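The execute helper called above is defined elsewhere in neon_surf_wrapper.py and is not shown in this hunk; under that assumption, a minimal equivalent is a checked subprocess call:

import subprocess

def execute(command):
    # command is a list of strings, e.g. ["./subset_data", "point", "--lat", "42.5", ...]
    print("Running:", " ".join(command))
    subprocess.run(command, check=True)  # raise CalledProcessError on a non-zero exit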
diff --git a/tools/site_and_regional/run_neon.py b/tools/site_and_regional/run_neon.py
index 020bc2e8ee..101948b02c 100755
--- a/tools/site_and_regional/run_neon.py
+++ b/tools/site_and_regional/run_neon.py
@@ -1,5 +1,5 @@
#! /usr/bin/env python3
-
+
"""
|------------------------------------------------------------------|
|--------------------- Instructions -----------------------------|
@@ -40,25 +40,25 @@
./run_neon.py --help
-------------------------------------------------------------------
"""
-#TODO (NS)
-#- [ ]
-#- [ ] Case dependency and the ability to check case status
-#- [ ] If Case dependency works we don't need finidat given explicilty for post-ad and transient.
+# TODO (NS)
+# - [ ]
+# - [ ] Case dependency and the ability to check case status
+# - [ ] If Case dependency works we don't need finidat given explicitly for post-ad and transient.
-#- [ ] checkout_externals instead of using env varaiable
-#- [ ] wget the fields available and run for those available
-
-#- [ ] Matrix spin-up if (SASU) Eric merged it in
-#- [ ] Make sure both AD and SASU are not on at the same time
+# - [ ] checkout_externals instead of using env variable
+# - [ ] wget the fields available and run for those available
-#- [ ] Make sure CIME and other dependencies is checked out.
+# - [ ] Matrix spin-up if (SASU) Eric merged it in
+# - [ ] Make sure both AD and SASU are not on at the same time
+
+# - [ ] Make sure CIME and other dependencies are checked out.
-
-#Import libraries
+
+# Import libraries
import os
import sys
-import time
+import time
import shutil
import logging
import requests
@@ -66,159 +66,188 @@
import re
import subprocess
import pandas as pd
-import glob
+import glob
import datetime
from getpass import getuser
-
+
# Get the ctsm util tools and then the cime tools.
-_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..","..",'python'))
+_CTSM_PYTHON = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), "..", "..", "python")
+)
sys.path.insert(1, _CTSM_PYTHON)
from ctsm import add_cime_to_path
from ctsm.path_utils import path_to_ctsm_root
+from ctsm.download_utils import download_file
import CIME.build as build
from standard_script_setup import *
-from CIME.case import Case
-from CIME.utils import safe_copy, expect, symlink_force
-from argparse import RawTextHelpFormatter
-from CIME.locked_files import lock_file, unlock_file
+from CIME.case import Case
+from CIME.utils import safe_copy, expect, symlink_force
+from argparse import RawTextHelpFormatter
+from CIME.locked_files import lock_file, unlock_file
-logger = logging.getLogger(__name__)
-
-def get_parser(args, description, valid_neon_sites):
+logger = logging.getLogger(__name__)
+
+
+def get_parser(args, description, valid_neon_sites):
"""
Get parser object for this script.
"""
- parser = argparse.ArgumentParser(description=description,
- formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser = argparse.ArgumentParser(
+ description=description, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
CIME.utils.setup_standard_logging_options(parser)
-
+
parser.print_usage = parser.print_help
- parser.add_argument('--neon-sites',
- help='4-letter neon site code.',
- action="store",
- required=False,
- choices=valid_neon_sites + ['all'],
- dest="neon_sites",
- default=["OSBS"],
- nargs='+')
-
- parser.add_argument('--base-case',
- help='''
+ parser.add_argument(
+ "--neon-sites",
+ help="4-letter neon site code.",
+ action="store",
+ required=False,
+ choices=valid_neon_sites + ["all"],
+ dest="neon_sites",
+ default=["OSBS"],
+ nargs="+",
+ )
+
+ parser.add_argument(
+ "--base-case",
+ help="""
Root Directory of base case build
[default: %(default)s]
- ''',
- action="store",
- dest="base_case_root",
- type =str,
- required=False,
- default=None)
-
- parser.add_argument('--output-root',
- help='''
+ """,
+ action="store",
+ dest="base_case_root",
+ type=str,
+ required=False,
+ default=None,
+ )
+
+ parser.add_argument(
+ "--output-root",
+ help="""
Root output directory of cases
[default: %(default)s]
- ''',
- action="store",
- dest="output_root",
- type =str,
- required=False,
- default="CIME_OUTPUT_ROOT as defined in cime")
-
- parser.add_argument('--overwrite',
- help='''
+ """,
+ action="store",
+ dest="output_root",
+ type=str,
+ required=False,
+ default="CIME_OUTPUT_ROOT as defined in cime",
+ )
+
+ parser.add_argument(
+ "--overwrite",
+ help="""
overwrite existing case directories
[default: %(default)s]
- ''',
- action="store_true",
- dest="overwrite",
- required = False,
- default = False)
-
- parser.add_argument('--setup-only',
- help='''
+ """,
+ action="store_true",
+ dest="overwrite",
+ required=False,
+ default=False,
+ )
+
+ parser.add_argument(
+ "--setup-only",
+ help="""
Only setup the requested cases, do not build or run
[default: %(default)s]
- ''',
- action="store_true",
- dest="setup_only",
- required = False,
- default = False)
-
- parser.add_argument('--rerun',
- help='''
+ """,
+ action="store_true",
+ dest="setup_only",
+ required=False,
+ default=False,
+ )
+
+ parser.add_argument(
+ "--rerun",
+ help="""
If the case exists but does not appear to be complete, restart it.
[default: %(default)s]
- ''',
- action="store_true",
- dest="rerun",
- required = False,
- default = False)
-
- parser.add_argument('--no-batch',
- help='''
+ """,
+ action="store_true",
+ dest="rerun",
+ required=False,
+ default=False,
+ )
+
+ parser.add_argument(
+ "--no-batch",
+ help="""
Run locally, do not use batch queueing system (if defined for Machine)
[default: %(default)s]
- ''',
- action="store_true",
- dest="no_batch",
- required = False,
- default = False)
-
- parser.add_argument('--run-type',
- help='''
+ """,
+ action="store_true",
+ dest="no_batch",
+ required=False,
+ default=False,
+ )
+
+ parser.add_argument(
+ "--run-type",
+ help="""
Type of run to do
[default: %(default)s]
- ''',
- choices = ["ad", "postad", "transient", "sasu"],
- default = "transient")
-
- parser.add_argument ('--run-length',
- help='''
+ """,
+ choices=["ad", "postad", "transient", "sasu"],
+ default="transient",
+ )
+
+ parser.add_argument(
+ "--run-length",
+ help="""
How long to run (modified ISO 8601 duration)
[default: %(default)s]
- ''',
- required = False,
- type = str,
- default = '0Y')
-
- parser.add_argument('--start-date',
- help='''
+ """,
+ required=False,
+ type=str,
+ default="0Y",
+ )
+
+ parser.add_argument(
+ "--start-date",
+ help="""
Start date for running CTSM simulation in ISO format.
[default: %(default)s]
- ''',
- action="store",
- dest="start_date",
- required = False,
- type = datetime.date.fromisoformat,
- default = datetime.datetime.strptime("2018-01-01",'%Y-%m-%d'))
-
- parser.add_argument('--end-date',
- help='''
+ """,
+ action="store",
+ dest="start_date",
+ required=False,
+ type=datetime.date.fromisoformat,
+ default=datetime.datetime.strptime("2018-01-01", "%Y-%m-%d"),
+ )
+
+ parser.add_argument(
+ "--end-date",
+ help="""
End date for running CTSM simulation in ISO format.
[default: %(default)s]
- ''',
- action="store",
- dest="end_date",
- required = False,
- type = datetime.date.fromisoformat,
- default = datetime.datetime.strptime("2021-01-01",'%Y-%m-%d'))
-
- parser.add_argument('--run-from-postad',
- help='''
+ """,
+ action="store",
+ dest="end_date",
+ required=False,
+ type=datetime.date.fromisoformat,
+ default=datetime.datetime.strptime("2021-01-01", "%Y-%m-%d"),
+ )
+
+ parser.add_argument(
+ "--run-from-postad",
+ help="""
For transient runs only - should we start from the postad spinup or finidat?
By default start from finidat, if this flag is used the postad run must be available.
- ''',
- action="store_true",
- required = False,
- default = False)
+ """,
+ action="store_true",
+ required=False,
+ default=False,
+ )
args = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser)
- if 'all' in args.neon_sites:
+ if "all" in args.neon_sites:
neon_sites = valid_neon_sites
else:
neon_sites = args.neon_sites
@@ -229,15 +258,15 @@ def get_parser(args, description, valid_neon_sites):
if "CIME_OUTPUT_ROOT" in args.output_root:
args.output_root = None
- if args.run_length == '0Y':
- if args.run_type == 'ad':
- run_length = '200Y'
- elif args.run_type == 'postad':
- run_length = '50Y'
+ if args.run_length == "0Y":
+ if args.run_type == "ad":
+ run_length = "200Y"
+ elif args.run_type == "postad":
+ run_length = "50Y"
else:
# The transient run length is set by cdeps atm buildnml to the last date of the available tower data
# this value is not used
- run_length = '4Y'
+ run_length = "4Y"
run_length = parse_isoduration(run_length)
base_case_root = None
@@ -249,7 +278,19 @@ def get_parser(args, description, valid_neon_sites):
root_logger = logging.getLogger()
root_logger.setLevel(logging.WARN)
- return neon_sites, args.output_root, args.run_type, args.overwrite, run_length, base_case_root, args.run_from_postad, args.setup_only, args.no_batch, args.rerun
+ return (
+ neon_sites,
+ args.output_root,
+ args.run_type,
+ args.overwrite,
+ run_length,
+ base_case_root,
+ args.run_from_postad,
+ args.setup_only,
+ args.no_batch,
+ args.rerun,
+ )
+
def get_isosplit(s, split):
if split in s:
@@ -258,92 +299,109 @@ def get_isosplit(s, split):
n = 0
return n, s
+
def parse_isoduration(s):
- '''
+ """
simple ISO 8601 duration parser, does not account for leap years and assumes 30 day months
- '''
+ """
# Remove prefix
- s = s.split('P')[-1]
-
+ s = s.split("P")[-1]
+
# Step through letter dividers
- years, s = get_isosplit(s, 'Y')
- months, s = get_isosplit(s, 'M')
- days, s = get_isosplit(s, 'D')
-
+ years, s = get_isosplit(s, "Y")
+ months, s = get_isosplit(s, "M")
+ days, s = get_isosplit(s, "D")
+
# Convert all to timedelta
- dt = datetime.timedelta(days=int(days)+365*int(years)+30*int(months))
- return int(dt.total_seconds()/86400)
+ dt = datetime.timedelta(days=int(days) + 365 * int(years) + 30 * int(months))
+ return int(dt.total_seconds() / 86400)
+
-class NeonSite :
+class NeonSite:
"""
A class for encapsulating neon sites.
-
+
...
-
+
Attributes
----------
-
+
Methods
-------
"""
+
def __init__(self, name, start_year, end_year, start_month, end_month, finidat):
self.name = name
- self.start_year= int(start_year)
+ self.start_year = int(start_year)
self.end_year = int(end_year)
self.start_month = int(start_month)
self.end_month = int(end_month)
self.cesmroot = path_to_ctsm_root()
self.finidat = finidat
-
- def __str__(self):
- return str(self.__class__) + '\n' + '\n'.join((str(item) + ' = '
- for item in (self.__dict__)))
- def build_base_case(self, cesmroot, output_root, res, compset, overwrite=False, setup_only=False):
+ def __str__(self):
+ return (
+ str(self.__class__)
+ + "\n"
+ + "\n".join((str(item) + " = " for item in (self.__dict__)))
+            + "\n".join("{} = {}".format(k, v) for k, v in self.__dict__.items())
+
+ def build_base_case(
+ self, cesmroot, output_root, res, compset, overwrite=False, setup_only=False
+ ):
"""
Function for building a base_case to clone.
To spend less time on building ctsm for the neon cases,
all the other cases are cloned from this case
-
+
Args:
- self:
+ self:
The NeonSite object
- base_root (str):
- root of the base_case CIME
+ base_root (str):
+ root of the base_case CIME
res (str):
base_case resolution or gridname
compset (str):
base case compset
- overwrite (bool) :
+ overwrite (bool) :
Flag to overwrite the case if exists
"""
print("---- building a base case -------")
self.base_case_root = output_root
- user_mods_dirs = [os.path.join(cesmroot,"cime_config","usermods_dirs","NEON",self.name)]
+ user_mods_dirs = [
+ os.path.join(cesmroot, "cime_config", "usermods_dirs", "NEON", self.name)
+ ]
if not output_root:
output_root = os.getcwd()
- case_path = os.path.join(output_root,self.name)
-
- logger.info ('base_case_name : {}'.format(self.name))
- logger.info ('user_mods_dir : {}'.format(user_mods_dirs[0]))
+ case_path = os.path.join(output_root, self.name)
+
+ logger.info("base_case_name : {}".format(self.name))
+ logger.info("user_mods_dir : {}".format(user_mods_dirs[0]))
if overwrite and os.path.isdir(case_path):
- print ("Removing the existing case at: {}".format(case_path))
+ print("Removing the existing case at: {}".format(case_path))
shutil.rmtree(case_path)
with Case(case_path, read_only=False) as case:
if not os.path.isdir(case_path):
print("---- creating a base case -------")
- case.create(case_path, cesmroot, compset, res,
- run_unsupported=True, answer="r",output_root=output_root,
- user_mods_dirs = user_mods_dirs, driver="nuopc")
+ case.create(
+ case_path,
+ cesmroot,
+ compset,
+ res,
+ run_unsupported=True,
+ answer="r",
+ output_root=output_root,
+ user_mods_dirs=user_mods_dirs,
+ driver="nuopc",
+ )
print("---- base case created ------")
- #--change any config for base_case:
- #case.set_value("RUN_TYPE","startup")
-
+ # --change any config for base_case:
+ # case.set_value("RUN_TYPE","startup")
print("---- base case setup ------")
case.case_setup()
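As a quick sanity check of the duration handling above (get_isosplit plus parse_isoduration, as defined in run_neon.py; the docstring's 365-day-year / 30-day-month convention applies):

# Assumes the two functions are importable from run_neon.
assert parse_isoduration("200Y") == 200 * 365          # the 'ad' spinup default, 73000 days
assert parse_isoduration("P1Y2M10D") == 365 + 60 + 10  # 435 days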
@@ -355,26 +413,42 @@ def build_base_case(self, cesmroot, output_root, res, compset, overwrite=False,
return case_path
print("---- base case build ------")
- # always walk through the build process to make sure it's up to date.
+ # always walk through the build process to make sure it's up to date.
t0 = time.time()
build.case_build(case_path, case=case)
t1 = time.time()
- total = t1-t0
- print ("Time required to building the base case: {} s.".format(total))
+ total = t1 - t0
+        print("Time required to build the base case: {} s.".format(total))
# update case_path to be the full path to the base case
return case_path
def diff_month(self):
- d1 = datetime.datetime(self.end_year,self.end_month, 1)
+ d1 = datetime.datetime(self.end_year, self.end_month, 1)
d2 = datetime.datetime(self.start_year, self.start_month, 1)
return (d1.year - d2.year) * 12 + d1.month - d2.month
-
-
- def run_case(self, base_case_root, run_type, run_length, overwrite=False, setup_only=False, no_batch=False, rerun=False):
- user_mods_dirs = [os.path.join(self.cesmroot,"cime_config","usermods_dirs","NEON",self.name)]
- expect(os.path.isdir(base_case_root), "Error base case does not exist in {}".format(base_case_root))
- case_root = os.path.abspath(os.path.join(base_case_root,"..", self.name+"."+run_type))
+ def run_case(
+ self,
+ base_case_root,
+ run_type,
+ run_length,
+ overwrite=False,
+ setup_only=False,
+ no_batch=False,
+ rerun=False,
+ ):
+ user_mods_dirs = [
+ os.path.join(
+ self.cesmroot, "cime_config", "usermods_dirs", "NEON", self.name
+ )
+ ]
+ expect(
+ os.path.isdir(base_case_root),
+ "Error base case does not exist in {}".format(base_case_root),
+ )
+ case_root = os.path.abspath(
+ os.path.join(base_case_root, "..", self.name + "." + run_type)
+ )
rundir = None
if os.path.isdir(case_root):
if overwrite:
@@ -383,78 +457,88 @@ def run_case(self, base_case_root, run_type, run_length, overwrite=False, setup_
elif rerun:
with Case(case_root, read_only=False) as case:
rundir = case.get_value("RUNDIR")
- if os.path.isfile(os.path.join(rundir,"ESMF_Profile.summary")):
- print("Case {} appears to be complete, not rerunning.".format(case_root))
+ if os.path.isfile(os.path.join(rundir, "ESMF_Profile.summary")):
+ print(
+ "Case {} appears to be complete, not rerunning.".format(
+ case_root
+ )
+ )
elif not setup_only:
print("Resubmitting case {}".format(case_root))
case.submit(no_batch=no_batch)
return
else:
- logger.warning("Case already exists in {}, not overwritting.".format(case_root))
+ logger.warning(
+                "Case already exists in {}, not overwriting.".format(case_root)
+ )
return
if run_type == "postad":
- adcase_root = case_root.replace('.postad','.ad')
+ adcase_root = case_root.replace(".postad", ".ad")
if not os.path.isdir(adcase_root):
- logger.warning("postad requested but no ad case found in {}".format(adcase_root))
+ logger.warning(
+ "postad requested but no ad case found in {}".format(adcase_root)
+ )
return
if not os.path.isdir(case_root):
# read_only = False should not be required here
with Case(base_case_root, read_only=False) as basecase:
print("---- cloning the base case in {}".format(case_root))
- basecase.create_clone(case_root, keepexe=True, user_mods_dirs=user_mods_dirs)
+ basecase.create_clone(
+ case_root, keepexe=True, user_mods_dirs=user_mods_dirs
+ )
with Case(case_root, read_only=False) as case:
# in order to avoid the complication of leap years we always set the run_length in units of days.
case.set_value("STOP_OPTION", "ndays")
case.set_value("STOP_N", run_length)
- case.set_value("REST_OPTION","end")
+ case.set_value("REST_OPTION", "end")
case.set_value("CONTINUE_RUN", False)
-
+
if run_type == "ad":
- case.set_value("CLM_FORCE_COLDSTART","on")
- case.set_value("CLM_ACCELERATED_SPINUP","on")
+ case.set_value("CLM_FORCE_COLDSTART", "on")
+ case.set_value("CLM_ACCELERATED_SPINUP", "on")
case.set_value("RUN_REFDATE", "0018-01-01")
- case.set_value("RUN_STARTDATE", "0018-01-01")
+ case.set_value("RUN_STARTDATE", "0018-01-01")
else:
- case.set_value("CLM_FORCE_COLDSTART","off")
- case.set_value("CLM_ACCELERATED_SPINUP","off")
+ case.set_value("CLM_FORCE_COLDSTART", "off")
+ case.set_value("CLM_ACCELERATED_SPINUP", "off")
case.set_value("RUN_TYPE", "hybrid")
-
+
if run_type == "postad":
self.set_ref_case(case)
-
+
if run_type == "transient":
if self.finidat:
- case.set_value("RUN_TYPE","startup")
+ case.set_value("RUN_TYPE", "startup")
else:
if not self.set_ref_case(case):
return
- case.set_value("STOP_OPTION","nmonths")
+ case.set_value("STOP_OPTION", "nmonths")
case.set_value("STOP_N", self.diff_month())
- case.set_value("DATM_YR_ALIGN",self.start_year)
- case.set_value("DATM_YR_START",self.start_year)
- case.set_value("DATM_YR_END",self.end_year)
- case.set_value("CALENDAR","GREGORIAN")
+ case.set_value("DATM_YR_ALIGN", self.start_year)
+ case.set_value("DATM_YR_START", self.start_year)
+ case.set_value("DATM_YR_END", self.end_year)
+ case.set_value("CALENDAR", "GREGORIAN")
else:
# for the spinup we want the start and end on year boundaries
if self.start_month == 1:
- case.set_value("DATM_YR_ALIGN",self.start_year)
- case.set_value("DATM_YR_START",self.start_year)
+ case.set_value("DATM_YR_ALIGN", self.start_year)
+ case.set_value("DATM_YR_START", self.start_year)
elif self.start_year + 1 <= self.end_year:
- case.set_value("DATM_YR_ALIGN",self.start_year+1)
- case.set_value("DATM_YR_START",self.start_year+1)
+ case.set_value("DATM_YR_ALIGN", self.start_year + 1)
+ case.set_value("DATM_YR_START", self.start_year + 1)
if self.end_month == 12:
- case.set_value("DATM_YR_END",self.end_year)
+ case.set_value("DATM_YR_END", self.end_year)
else:
- case.set_value("DATM_YR_END",self.end_year-1)
+ case.set_value("DATM_YR_END", self.end_year - 1)
if not rundir:
rundir = case.get_value("RUNDIR")
self.modify_user_nl(case_root, run_type, rundir)
-
+
case.create_namelists()
# explicitly run check_input_data
case.check_all_input_data()
@@ -465,25 +549,31 @@ def set_ref_case(self, case):
rundir = case.get_value("RUNDIR")
case_root = case.get_value("CASEROOT")
if case_root.endswith(".postad"):
- ref_case_root = case_root.replace(".postad",".ad")
+ ref_case_root = case_root.replace(".postad", ".ad")
root = ".ad"
else:
- ref_case_root = case_root.replace(".transient",".postad")
+ ref_case_root = case_root.replace(".transient", ".postad")
root = ".postad"
if not os.path.isdir(ref_case_root):
- logger.warning("ERROR: spinup must be completed first, could not find directory {}".format(ref_case_root))
+ logger.warning(
+ "ERROR: spinup must be completed first, could not find directory {}".format(
+ ref_case_root
+ )
+ )
return False
-
+
with Case(ref_case_root) as refcase:
refrundir = refcase.get_value("RUNDIR")
case.set_value("RUN_REFDIR", refrundir)
case.set_value("RUN_REFCASE", os.path.basename(ref_case_root))
refdate = None
- for reffile in glob.iglob(refrundir + "/{}{}.clm2.r.*.nc".format(self.name, root)):
+ for reffile in glob.iglob(
+ refrundir + "/{}{}.clm2.r.*.nc".format(self.name, root)
+ ):
m = re.search("(\d\d\d\d-\d\d-\d\d)-\d\d\d\d\d.nc", reffile)
if m:
refdate = m.group(1)
- symlink_force(reffile, os.path.join(rundir,os.path.basename(reffile)))
+ symlink_force(reffile, os.path.join(rundir, os.path.basename(reffile)))
logger.info("Found refdate of {}".format(refdate))
if not refdate:
logger.warning("Could not find refcase for {}".format(case_root))
@@ -491,182 +581,195 @@ def set_ref_case(self, case):
for rpfile in glob.iglob(refrundir + "/rpointer*"):
safe_copy(rpfile, rundir)
- if not os.path.isdir(os.path.join(rundir, "inputdata")) and os.path.isdir(os.path.join(refrundir,"inputdata")):
- symlink_force(os.path.join(refrundir,"inputdata"),os.path.join(rundir,"inputdata"))
-
+ if not os.path.isdir(os.path.join(rundir, "inputdata")) and os.path.isdir(
+ os.path.join(refrundir, "inputdata")
+ ):
+ symlink_force(
+ os.path.join(refrundir, "inputdata"), os.path.join(rundir, "inputdata")
+ )
case.set_value("RUN_REFDATE", refdate)
if case_root.endswith(".postad"):
case.set_value("RUN_STARTDATE", refdate)
else:
- case.set_value("RUN_STARTDATE", "{yr:04d}-{mo:02d}-01".format(yr=self.start_year, mo=self.start_month))
+ case.set_value(
+ "RUN_STARTDATE",
+ "{yr:04d}-{mo:02d}-01".format(yr=self.start_year, mo=self.start_month),
+ )
return True
-
+
def modify_user_nl(self, case_root, run_type, rundir):
user_nl_fname = os.path.join(case_root, "user_nl_clm")
user_nl_lines = None
if run_type == "transient":
if self.finidat:
- user_nl_lines = ["finidat = '{}/inputdata/lnd/ctsm/initdata/{}'".format(rundir,self.finidat)]
+ user_nl_lines = [
+ "finidat = '{}/inputdata/lnd/ctsm/initdata/{}'".format(
+ rundir, self.finidat
+ )
+ ]
else:
user_nl_lines = [
"hist_fincl2 = ''",
"hist_mfilt = 20",
"hist_nhtfrq = -8760",
"hist_empty_htapes = .true.",
- "hist_fincl1 = 'TOTECOSYSC', 'TOTECOSYSN', 'TOTSOMC', 'TOTSOMN', 'TOTVEGC', 'TOTVEGN', 'TLAI', 'GPP', 'CPOOL', 'NPP', 'TWS', 'H2OSNO'"]
-
+ "hist_fincl1 = 'TOTECOSYSC', 'TOTECOSYSN', 'TOTSOMC', 'TOTSOMN', 'TOTVEGC', 'TOTVEGN', 'TLAI', 'GPP', 'CPOOL', 'NPP', 'TWS', 'H2OSNO'",
+ ]
+
if user_nl_lines:
with open(user_nl_fname, "a") as fd:
for line in user_nl_lines:
fd.write("{}\n".format(line))
-
def check_neon_listing(valid_neon_sites):
"""
A function to download and parse neon listing file.
"""
- listing_file = 'listing.csv'
- url = 'https://neon-ncar.s3.data.neonscience.org/listing.csv'
-
+ listing_file = "listing.csv"
+ url = "https://neon-ncar.s3.data.neonscience.org/listing.csv"
+
download_file(url, listing_file)
- available_list= parse_neon_listing(listing_file, valid_neon_sites)
+ available_list = parse_neon_listing(listing_file, valid_neon_sites)
return available_list
+
def parse_neon_listing(listing_file, valid_neon_sites):
"""
A function to parse neon listing file
and find neon sites with the dates
where data is available.
-
+
Args:
listing_file (str): downloaded listing file
-
+
Returns:
available_list :
list of neon_site objects that is found
on the downloaded listing file.
"""
-
- #pd.set_option("display.max_rows", None, "display.max_columns", None)
-
+
+ # pd.set_option("display.max_rows", None, "display.max_columns", None)
+
available_list = []
-
+
df = pd.read_csv(listing_file)
-
+
# check for finidat files for transient run
- finidatlist = df[df['object'].str.contains("lnd/ctsm")]
+ finidatlist = df[df["object"].str.contains("lnd/ctsm")]
- #-- filter lines with atm/cdep
- df = df[df['object'].str.contains("atm/cdeps/")]
+ # -- filter lines with atm/cdep
+ df = df[df["object"].str.contains("atm/cdeps/")]
- #-- split the object str to extract site name
- df=df['object'].str.split("/", expand=True)
-
- #-- groupby site name
+ # -- split the object str to extract site name
+ df = df["object"].str.split("/", expand=True)
+
+ # -- groupby site name
grouped_df = df.groupby(7)
for key, item in grouped_df:
- #-- check if it is a valid neon site
+ # -- check if it is a valid neon site
if any(key in x for x in valid_neon_sites):
site_name = key
tmp_df = grouped_df.get_group(key)
-
- #-- filter files only ending with YYYY-MM.nc
- tmp_df = tmp_df[tmp_df[8].str.contains('\d\d\d\d-\d\d.nc')]
+
+ # -- filter files only ending with YYYY-MM.nc
+ tmp_df = tmp_df[tmp_df[8].str.contains("\d\d\d\d-\d\d.nc")]
latest_version = tmp_df[6].iloc[-1]
tmp_df = tmp_df[tmp_df[6].str.contains(latest_version)]
- #-- remove .nc from the file names
- tmp_df[8] = tmp_df[8].str.replace('.nc','')
-
- tmp_df2 = tmp_df[8].str.split("-", expand=True)
+ # -- remove .nc from the file names
+ tmp_df[8] = tmp_df[8].str.replace(".nc", "")
+
+ tmp_df2 = tmp_df[8].str.split("-", expand=True)
# ignore any prefix in file name and just get year
tmp_df2[0] = tmp_df2[0].str.slice(-4)
- #-- figure out start_year and end_year
+ # -- figure out start_year and end_year
start_year = int(tmp_df2[0].iloc[0])
end_year = int(tmp_df2[0].iloc[-1])
-
- #-- figure out start_month and end_month
+
+ # -- figure out start_month and end_month
start_month = int(tmp_df2[1].iloc[0])
end_month = int(tmp_df2[1].iloc[-1])
- logger.debug ("Valid neon site " + site_name+" found!")
- logger.debug ("File version {}".format(latest_version))
- logger.debug ('start_year={}'.format(start_year))
- logger.debug ('end_year={}'.format(end_year))
- logger.debug ('start_month={}'.format(start_month))
- logger.debug ('end_month={}'.format(end_month))
+ logger.debug("Valid neon site " + site_name + " found!")
+ logger.debug("File version {}".format(latest_version))
+ logger.debug("start_year={}".format(start_year))
+ logger.debug("end_year={}".format(end_year))
+ logger.debug("start_month={}".format(start_month))
+ logger.debug("end_month={}".format(end_month))
finidat = None
- for line in finidatlist['object']:
+ for line in finidatlist["object"]:
if site_name in line:
- finidat = line.split(',')[0].split('/')[-1]
-
- neon_site = NeonSite(site_name, start_year, end_year, start_month, end_month, finidat)
- logger.debug (neon_site)
+ finidat = line.split(",")[0].split("/")[-1]
+
+ neon_site = NeonSite(
+ site_name, start_year, end_year, start_month, end_month, finidat
+ )
+ logger.debug(neon_site)
available_list.append(neon_site)
-
+
return available_list
-def download_file(url, fname):
- """
- Function to download a file.
-
- Args:
- url (str):
- url of the file for downloading
-
- fname (str) :
- file name to save the downloaded file.
- """
- response = requests.get(url)
-
- with open(fname, 'wb') as f:
- f.write(response.content)
-
- #-- Check if download status_code
- if response.status_code == 200:
- print('Download finished successfully for', fname,'.')
- elif response.status_code == 404:
- print('File '+fname+'was not available on the neon server:'+ url)
def main(description):
cesmroot = path_to_ctsm_root()
# Get the list of supported neon sites from usermods
- valid_neon_sites = glob.glob(os.path.join(cesmroot,"cime_config","usermods_dirs","NEON","[!d]*"))
- valid_neon_sites = sorted([v.split('/')[-1] for v in valid_neon_sites])
-
- site_list, output_root, run_type, overwrite, run_length, base_case_root, run_from_postad, setup_only, no_batch, rerun = get_parser(sys.argv, description, valid_neon_sites)
+ valid_neon_sites = glob.glob(
+ os.path.join(cesmroot, "cime_config", "usermods_dirs", "NEON", "[!d]*")
+ )
+ valid_neon_sites = sorted([v.split("/")[-1] for v in valid_neon_sites])
+
+ (
+ site_list,
+ output_root,
+ run_type,
+ overwrite,
+ run_length,
+ base_case_root,
+ run_from_postad,
+ setup_only,
+ no_batch,
+ rerun,
+ ) = get_parser(sys.argv, description, valid_neon_sites)
if output_root:
- logger.debug ("output_root : "+ output_root)
+ logger.debug("output_root : " + output_root)
if not os.path.exists(output_root):
os.makedirs(output_root)
- #-- check neon listing file for available data:
+ # -- check neon listing file for available data:
available_list = check_neon_listing(valid_neon_sites)
- #=================================
- #-- all neon sites can be cloned from one generic case
- #-- so no need to define a base_case for every site.
+ # =================================
+ # -- all neon sites can be cloned from one generic case
+ # -- so no need to define a base_case for every site.
res = "CLM_USRDAT"
compset = "I1PtClm51Bgc"
- #-- Looping over neon sites
+ # -- Looping over neon sites
- for neon_site in available_list:
+ for neon_site in available_list:
if neon_site.name in site_list:
if run_from_postad:
neon_site.finidat = None
if not base_case_root:
- base_case_root = neon_site.build_base_case(cesmroot, output_root, res,
- compset, overwrite, setup_only)
- logger.info ("-----------------------------------")
- logger.info ("Running CTSM for neon site : {}".format(neon_site.name))
- neon_site.run_case(base_case_root, run_type, run_length, overwrite, setup_only, no_batch, rerun)
-
-if __name__ == "__main__":
- main(__doc__)
-
-
+ base_case_root = neon_site.build_base_case(
+ cesmroot, output_root, res, compset, overwrite, setup_only
+ )
+ logger.info("-----------------------------------")
+ logger.info("Running CTSM for neon site : {}".format(neon_site.name))
+ neon_site.run_case(
+ base_case_root,
+ run_type,
+ run_length,
+ overwrite,
+ setup_only,
+ no_batch,
+ rerun,
+ )
+
+
+if __name__ == "__main__":
+ main(__doc__)
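A small usage sketch of the NeonSite month arithmetic above, using the script's default 2018-01-01 through 2021-01-01 window (illustrative only; NeonSite is the class defined in run_neon.py, its constructor also resolves the CTSM root, and finidat is left unset):

site = NeonSite("OSBS", 2018, 2021, 1, 1, None)
assert site.diff_month() == 36   # (2021 - 2018) * 12 + (1 - 1) months of tower data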
diff --git a/tools/site_and_regional/sample_inlist b/tools/site_and_regional/sample_inlist
deleted file mode 100644
index 0b13271540..0000000000
--- a/tools/site_and_regional/sample_inlist
+++ /dev/null
@@ -1,18 +0,0 @@
-!
-! Sample input file of global datasets to extract a region from. Only works
-! on standard 2D CLM files such as: fatmlndfrc, fsurdat, flanduse_timeseries,
-! stream_fldfilename_ndep, stream_fldfilename_lightng, and/or
-! stream_fldfilename_popdens.
-!
-! format: list of filenames similar to namelist format with a single line
-! per file with the format of:
-!
-! variable = "filepath"
-!
-! Lines (such as these) with "!" are ignored.
-!
-
-! fatmlndfrc is REQUIRED! MUST BE ON THE SAME GRID AS ALL OF THE DATAFILES BELOW!!!!
-
- fatmlndfrc = '/glade/p/cesmdata/cseg/inputdata/share/domains/domain.clm/domain.lnd.0.125x0.125_tx0.1v2.140704.nc'
- fsurdat = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/surfdata_map/surfdata_0.125x0.125_simyr2000_c150114.nc'
diff --git a/tools/site_and_regional/sample_inlist_0.5popd b/tools/site_and_regional/sample_inlist_0.5popd
deleted file mode 100644
index 8d4480fff4..0000000000
--- a/tools/site_and_regional/sample_inlist_0.5popd
+++ /dev/null
@@ -1,22 +0,0 @@
-!
-! Sample input file of global datasets to extract a region from. Only works
-! on standard 2D CLM files such as: fatmlndfrc, fsurdat, flanduse_timeseries,
-! stream_fldfilename_ndep, stream_fldfilename_lightng, and/or
-! stream_fldfilename_popdens.
-!
-! format: list of filenames similar to namelist format with a single line
-! per file with the format of:
-!
-! variable = "filepath"
-!
-! Lines (such as these) with "!" are ignored.
-!
-
-! fatmlndfrc is REQUIRED! MUST BE ON THE SAME GRID AS ALL OF THE DATAFILES BELOW!!!!
-
- fatmlndfrc = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/firedata/clmforc.Li_2012_hdm_0.5x0.5_AVHRR_simyr1850-2010_c130401.nc'
-!
-! The following files are interpolated by default so technically do NOT need
-! to be extracted, but it will help performance.
-!
- stream_fldfilename_popdens = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/firedata/clmforc.Li_2012_hdm_0.5x0.5_AVHRR_simyr1850-2010_c130401.nc'
diff --git a/tools/site_and_regional/sample_inlist_T62 b/tools/site_and_regional/sample_inlist_T62
deleted file mode 100644
index ac5ee2ed8d..0000000000
--- a/tools/site_and_regional/sample_inlist_T62
+++ /dev/null
@@ -1,23 +0,0 @@
-!
-! Sample input file of global datasets to extract a region from. Only works
-! on standard 2D CLM files such as: fatmlndfrc, fsurdat, flanduse_timeseries,
-! stream_fldfilename_ndep, stream_fldfilename_lightng, and/or
-! stream_fldfilename_popdens.
-!
-! format: list of filenames similar to namelist format with a single line
-! per file with the format of:
-!
-! variable = "filepath"
-!
-! Lines (such as these) with "!" are ignored.
-!
-
-! fatmlndfrc is REQUIRED! MUST BE ON THE SAME GRID AS ALL OF THE DATAFILES BELOW!!!!
-
- fatmlndfrc = '/glade/p/cesmdata/cseg/inputdata/atm/datm7/atm_forcing.datm7.Qian.T62.c080727/domain.T62.050609.nc'
-!
-! The following files are interpolated by default so technically do NOT need
-! to be extracted, but it will help performance.
-!
- stream_fldfilename_lightng = '/glade/p/cesmdata/cseg/inputdata/atm/datm7/NASA_LIS/clmforc.Li_2012_climo1995-2011.T62.lnfm_c130327.nc'
- strm_datfil = '/glade/p/cesmdata/cseg/inputdata/atm/datm7/atm_forcing.datm7.Qian.T62.c080727/Solar6Hrly/clmforc.Qian.c2006.T62.Solr.1948-01.nc'
diff --git a/tools/site_and_regional/sample_inlist_ndep b/tools/site_and_regional/sample_inlist_ndep
deleted file mode 100644
index 726c7ffbaf..0000000000
--- a/tools/site_and_regional/sample_inlist_ndep
+++ /dev/null
@@ -1,22 +0,0 @@
-!
-! Sample input file of global datasets to extract a region from. Only works
-! on standard 2D CLM files such as: fatmlndfrc, fsurdat, flanduse_timeseries,
-! stream_fldfilename_ndep, stream_fldfilename_lightng, and/or
-! stream_fldfilename_popdens.
-!
-! format: list of filenames similar to namelist format with a single line
-! per file with the format of:
-!
-! variable = "filepath"
-!
-! Lines (such as these) with "!" are ignored.
-!
-
-! fatmlndfrc is REQUIRED! MUST BE ON THE SAME GRID AS ALL OF THE DATAFILES BELOW!!!!
-
- fatmlndfrc = '/glade/p/cesmdata/cseg/inputdata/share/domains/domain.lnd.fv1.9x2.5_gx1v6.090206.nc'
-!
-! The following files are interpolated by default so technically do NOT need
-! to be extracted, but it will help performance.
-!
- stream_fldfilename_ndep = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/ndepdata/fndep_clm_hist_simyr1849-2006_1.9x2.5_c100428.nc'
diff --git a/tools/site_and_regional/sample_outlist b/tools/site_and_regional/sample_outlist
deleted file mode 100644
index fd153e7206..0000000000
--- a/tools/site_and_regional/sample_outlist
+++ /dev/null
@@ -1,14 +0,0 @@
-!
-! Sample input file of regional datasets that will be created. You need to have
-! the same list of files as in the input filelist as well. See the sample_inlist
-! for the list of files that can be operated on.
-!
-! format: list of filenames similar to namelist format with a single line
-! per file with the format of:
-!
-! variable = "filepath"
-!
-! Lines (such as these) with "!" are ignored.
-!
- fatmlndfrc = 'domain.lnd.184x256pt_0.125x0.125_alaskaUSA_tx0.1v2_c150114.nc'
- fsurdat = 'surfdata_184x256pt_0.125x0.125_alaskaUSA_simyr2000_c150114.nc'
diff --git a/tools/site_and_regional/sample_outlist_0.5popd b/tools/site_and_regional/sample_outlist_0.5popd
deleted file mode 100644
index 671a55037d..0000000000
--- a/tools/site_and_regional/sample_outlist_0.5popd
+++ /dev/null
@@ -1,14 +0,0 @@
-!
-! Sample input file of regional datasets that will be created. You need to have
-! the same list of files as in the input filelist as well. See the sample_inlist
-! for the list of files that can be operated on.
-!
-! format: list of filenames similar to namelist format with a single line
-! per file with the format of:
-!
-! variable = "filepath"
-!
-! Lines (such as these) with "!" are ignored.
-!
- fatmlndfrc = 'domain.lnd.0.5x0.5_alaskaUSA_gx1v6_c141117.nc'
-stream_fldfilename_popdens = 'clmforc.Li_2012_hdm_0.5x0.5_AVHRR_simyr1850-2010_c141117.nc'
diff --git a/tools/site_and_regional/sample_outlist_T62 b/tools/site_and_regional/sample_outlist_T62
deleted file mode 100644
index 3dfe69148a..0000000000
--- a/tools/site_and_regional/sample_outlist_T62
+++ /dev/null
@@ -1,16 +0,0 @@
-!
-! Sample input file of regional datasets that will be created. You need to have
-! the same list of files as in the input filelist as well. See the sample_inlist
-! for the list of files that can be operated on.
-!
-! format: list of filenames similar to namelist format with a single line
-! per file with the format of:
-!
-! variable = "filepath"
-!
-! Lines (such as these) with "!" are ignored.
-!
- fatmlndfrc = 'domain.lnd.T62_alaskaUSA_c141117.nc'
-
- stream_fldfilename_lightng = 'clmforc.Li_2012_climo1995-2011.T62_alaskaUSA.lnfm_c141117.nc'
- strm_datfil = 'clmforc.Qian.c2006.T62_alaskaUSA.Solr.1948-01.nc'
diff --git a/tools/site_and_regional/sample_outlist_ndep b/tools/site_and_regional/sample_outlist_ndep
deleted file mode 100644
index d3dad24ae7..0000000000
--- a/tools/site_and_regional/sample_outlist_ndep
+++ /dev/null
@@ -1,15 +0,0 @@
-!
-! Sample input file of regional datasets that will be created. You need to have
-! the same list of files as in the input filelist as well. See the sample_inlist
-! for the list of files that can be operated on.
-!
-! format: list of filenames similar to namelist format with a single line
-! per file with the format of:
-!
-! variable = "filepath"
-!
-! Lines (such as these) with "!" are ignored.
-!
- fatmlndfrc = 'domain.lnd.13x12pt_f19_alaskaUSA_gx1v6_c141117.nc'
-
- stream_fldfilename_ndep = 'fndep_clm_hist_simyr1849-2006_13x12pt_f19_alaskaUSA_c141117.nc'
diff --git a/tools/site_and_regional/subset_data b/tools/site_and_regional/subset_data
new file mode 100755
index 0000000000..bb582b21f8
--- /dev/null
+++ b/tools/site_and_regional/subset_data
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+"""
+This is just a top-level skeleton script that calls
+subset_data.py.
+The original code (subset_data.py) is located under
+python/ctsm folder.
+
+For full instructions on how to run the code and different options,
+please check python/ctsm/subset_data.py file.
+This script extracts domain files, surface dataset, and DATM files
+at either a single point or a region using the global dataset.
+
+To run this script the following packages are required:
+ - numpy
+ - xarray
+----------------------------------------------------------------
+To see all available options for single-point/regional subsetting:
+ ./subset_data --help
+
+----------------------------------------------------------------
+Instructions for running on Cheyenne/Casper:
+ load the following into your local environment
+ module load python
+ ncar_pylib
+
+To remove from your environment on Cheyenne/Casper:
+ deactivate
+"""
+
+import os
+import sys
+
+# -- add python/ctsm to path
+_CTSM_PYTHON = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python"
+)
+sys.path.insert(1, _CTSM_PYTHON)
+
+from ctsm.subset_data import main
+
+if __name__ == "__main__":
+ main()
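Because the wrapper only adjusts sys.path and delegates, the same entry point can also be driven from Python. The sketch below assumes ctsm.subset_data.main() reads sys.argv, which is how the wrapper invokes it; the checkout path and site name are placeholders:

import sys

sys.path.insert(1, "/path/to/CTSM/python")   # hypothetical checkout location
sys.argv = ["subset_data", "point", "--lat", "42.5", "--lon", "287.8",
            "--site", "DEMO", "--create-surface"]

from ctsm.subset_data import main
main()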
diff --git a/tools/site_and_regional/subset_data.py b/tools/site_and_regional/subset_data.py
deleted file mode 100755
index a649c657b9..0000000000
--- a/tools/site_and_regional/subset_data.py
+++ /dev/null
@@ -1,1182 +0,0 @@
-#! /usr/bin/env python
-"""
-|------------------------------------------------------------------|
-|--------------------- Instructions -----------------------------|
-|------------------------------------------------------------------|
-
-Instructions for running on Cheyenne/Casper:
-
-load the following into your local environment
- module load python
- ncar_pylib
-
--------------------------------------------------------------------
-To see the available options for single point cases:
- ./subset_data.py point --help
-
-To see the available options for regional cases:
- ./subset_data.py reg --help
--------------------------------------------------------------------
-
-This script extracts domain files, surface dataset, and DATM files
-at either a single point or a region using the global dataset.
-
-After creating a case using a global compset, run preview_namelist.
-From the resulting lnd_in file in the run directory, find the name
-of the domain file, and the surface data file.
-From the datm streams files (e.g. datm.streams.txt.CLMGSWP3v1.Precip)
-find the name of the datm forcing data domain file and forcing files.
-Use these file names as the sources for the single point/regional
-files to be created (see below).
-
-After running this script, point to the new CLM domain and surface
-dataset using the user_nl_clm file in the case directory. In addition,
-copy the datm.streams files to the case directory, with the prefix
-'user_', e.g. user_datm.streams.txt.CLMGSWP3v1.Precip. Change the
-information in the user_datm.streams* files to point to the single
-point datm data (domain and forcing files) created using this script.
-
-The domain file is not set via user_nl_clm, but requires changing
-LND_DOMAIN and ATM_DOMAIN (and their paths) in env_run.xml.
-
-Using single point forcing data requires specifying the nearest
-neighbor mapping algorithm for the datm streams (usually they are
-the first three in the list) in user_nl_datm: mapalgo = 'nn','nn','nn',
-..., where the '...' can still be 'bilinear', etc, depending on the
-other streams that are being used, e.g. aerosols, anomaly forcing,
-bias correction.
-
-The file env_mach_pes.xml should be modified to specify a single
-processor. The mpi-serial libraries should also be used, and can be
-set in env_build.xml by changing "MPILIB" to "mpi-serial" prior to
-setting up the case.
-
-The case for the single point simulation should have river routing
-and land ice models turned off (i.e. the compset should use stub
-models SROF and SGLC)
-
--------------------------------------------------------------------
-To run the script for a single point:
- ./subset_data.py point
-
-To run the script for a region:
- ./subset_data.py reg
-
-To remove NPL from your environment on Cheyenne/Casper:
- deactivate
--------------------------------------------------------------------
-
-"""
-# TODO
-# Automatic downloading of missing files if they are missing
-# default 78 pft vs 16 pft
-
-# Import libraries
-from __future__ import print_function
-
-import sys
-import os
-import string
-import logging
-import subprocess
-import argparse
-
-import numpy as np
-import xarray as xr
-
-from datetime import date
-from getpass import getuser
-from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
-
-myname = getuser()
-
-def get_parser():
- """Get parser object for this script."""
- #parser = ArgumentParser(description=__doc__,
- # formatter_class=ArgumentDefaultsHelpFormatter)
- parser = ArgumentParser(description=__doc__,
- formatter_class=argparse.RawDescriptionHelpFormatter)
-
- parser.print_usage = parser.print_help
- subparsers = parser.add_subparsers(
-                            help='Two possible ways to run this script, either:',
- dest ='run_type')
- pt_parser = subparsers.add_parser('point',
- help = 'Run script for a single point.')
- rg_parser = subparsers.add_parser('reg',
- help = 'Run script for a region.')
-
-
- pt_parser.add_argument('--lat',
- help='Single point latitude. [default: %(default)s]',
- action="store",
- dest="plat",
- required=False,
- type = plat_type,
- default=42.5)
- pt_parser.add_argument('--lon',
- help='Single point longitude. [default: %(default)s]',
- action="store",
- dest="plon",
- required=False,
- type = plon_type,
- default= 287.8 )
- pt_parser.add_argument('--site',
- help='Site name or tag. [default: %(default)s]',
- action="store",
- dest="site_name",
- required = False,
- type = str,
- default = '')
- pt_parser.add_argument('--create_domain',
- help='Flag for creating CLM domain file at single point. [default: %(default)s]',
- action="store",
- dest="create_domain",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default = False)
- pt_parser.add_argument('--create_surface',
- help='Flag for creating surface data file at single point. [default: %(default)s]',
- action="store",
- dest="create_surfdata",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default = True)
- pt_parser.add_argument('--create_landuse',
- help='Flag for creating landuse data file at single point. [default: %(default)s]',
- action="store",
- dest="create_landuse",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default = False)
- pt_parser.add_argument('--create_datm',
- help='Flag for creating DATM forcing data at single point. [default: %(default)s]',
- action="store",
- dest="create_datm",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default = False)
- pt_parser.add_argument('--datm_syr',
- help='Start year for creating DATM forcing at single point. [default: %(default)s]',
- action="store",
- dest="datm_syr",
- required = False,
- type = int,
- default = 1901)
- pt_parser.add_argument('--datm_eyr',
- help='End year for creating DATM forcing at single point. [default: %(default)s]',
- action="store",
- dest="datm_eyr",
- required = False,
- type = int,
- default = 2014)
- pt_parser.add_argument('--datm_from_tower',
- help='Flag for creating DATM forcing data at single point for a tower data. [default: %(default)s]',
- action="store",
- dest="datm_tower",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default = False)
- pt_parser.add_argument('--create_user_mods',
- help='Flag for creating a user mods directory. [default: %(default)s]',
- action="store",
- dest="create_user_mods",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default = False)
- pt_parser.add_argument('--user_mods_dir',
- help='Path to the user mods directory. [default: %(default)s]',
- action="store",
- dest="user_mod_dir",
- type = str,
- required = False,
- default = '')
- pt_parser.add_argument('--crop',
- help='Create datasets using the extensive list of prognostic crop types. [default: %(default)s]',
- action="store_true",
- dest="crop_flag",
- default=False)
- pt_parser.add_argument('--dompft',
- help='Dominant PFT type. [default: %(default)s]',
- action="store",
- dest="dom_pft",
- type =int,
- default=7)
- pt_parser.add_argument('--no-unisnow',
- help='Turn off the flag for create uniform snowpack. [default: %(default)s]',
- action="store_false",
- dest="uni_snow",
- default=True)
- pt_parser.add_argument('--no-overwrite_single_pft',
- help='Turn off the flag for making the whole grid 100%% single PFT. [default: %(default)s]',
- action="store_false",
- dest="overwrite_single_pft",
- default=True)
- pt_parser.add_argument('--zero_nonveg',
- help='Set all non-vegetation landunits to zero. [default: %(default)s]',
- action="store",
- dest="zero_nonveg",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default=True)
- pt_parser.add_argument('--no_saturation_excess',
- help='Turn off the flag for saturation excess. [default: %(default)s]',
- action="store",
- dest="no_saturation_excess",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default=True)
- pt_parser.add_argument('--outdir',
- help='Output directory. [default: %(default)s]',
- action="store",
- dest="out_dir",
- type =str,
- default="/glade/scratch/"+myname+"/single_point/")
-
- rg_parser.add_argument('--lat1',
- help='Region start latitude. [default: %(default)s]',
- action="store",
- dest="lat1",
- required=False,
- type = plat_type,
- default=-40)
- rg_parser.add_argument('--lat2',
- help='Region end latitude. [default: %(default)s]',
- action="store",
- dest="lat2",
- required=False,
- type = plat_type,
- default=15)
- rg_parser.add_argument('--lon1',
- help='Region start longitude. [default: %(default)s]',
- action="store",
- dest="lon1",
- required=False,
- type = plon_type,
- default= 275. )
- rg_parser.add_argument('--lon2',
- help='Region end longitude. [default: %(default)s]',
- action="store",
- dest="lon2",
- required=False,
- type = plon_type,
- default= 330. )
- rg_parser.add_argument('--reg',
- help='Region name or tag. [default: %(default)s]',
- action="store",
- dest="reg_name",
- required = False,
- type = str,
- default = '')
- rg_parser.add_argument('--create_domain',
- help='Flag for creating CLM domain file for a region. [default: %(default)s]',
- action="store",
- dest="create_domain",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default = False)
- rg_parser.add_argument('--create_surface',
- help='Flag for creating surface data file for a region. [default: %(default)s]',
- action="store",
- dest="create_surfdata",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default = True)
- rg_parser.add_argument('--create_landuse',
- help='Flag for creating landuse data file for a region. [default: %(default)s]',
- action="store",
- dest="create_landuse",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default = False)
- rg_parser.add_argument('--create_datm',
- help='Flag for creating DATM forcing data for a region. [default: %(default)s]',
- action="store",
- dest="create_datm",
- type = str2bool,
- nargs = '?',
- const = True,
- required = False,
- default = False)
- rg_parser.add_argument('--datm_syr',
- help='Start year for creating DATM forcing for a region. [default: %(default)s]',
- action="store",
- dest="datm_syr",
- required = False,
- type = int,
- default = 1901)
- rg_parser.add_argument('--datm_eyr',
- help='End year for creating DATM forcing for a region. [default: %(default)s]',
- action="store",
- dest="datm_eyr",
- required = False,
- type = int,
- default = 2014)
- rg_parser.add_argument('--crop',
- help='Create datasets using the extensive list of prognostic crop types. [default: %(default)s]',
- action="store_true",
- dest="crop_flag",
- default=False)
- rg_parser.add_argument('--dompft',
- help='Dominant PFT type. [default: %(default)s]',
- action="store",
- dest="dom_pft",
- type =int,
- default=7)
- rg_parser.add_argument('--outdir',
- help='Output directory. [default: %(default)s]',
- action="store",
- dest="out_dir",
- type =str,
- default="/glade/scratch/"+myname+"/regional/")
-
- return parser
-
-def str2bool(v):
- """
- Function for converting different forms of
- command line boolean strings to boolean value.
-
- Args:
- v (str): String bool input
-
- Raises:
- argparse.ArgumentTypeError: If the argument is not an acceptable
- boolean string (such as yes or no; true or false; y or n;
- t or f; 0 or 1).
-
- Returns:
- bool: Boolean value corresponding to the input.
- """
- if isinstance(v, bool):
- return v
- if v.lower() in ('yes', 'true', 't', 'y', '1'):
- return True
- elif v.lower() in ('no', 'false', 'f', 'n', '0'):
- return False
- else:
- raise argparse.ArgumentTypeError('Boolean value expected. [true or false] or [y or n]')
-
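-# For example, str2bool("YES") and str2bool("1") both return True,
-# while str2bool("n") returns False.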
-
-def plat_type(x):
- """
- Define the latitude type for the parser and raise an error
- if the latitude is not between -90 and 90.
- """
- x = float(x)
- if (x < -90) or (x > 90):
- raise argparse.ArgumentTypeError("ERROR: Latitude should be between -90 and 90.")
- return x
-
-
-def plon_type(x):
- """
- Define the longitude type for the parser: convert negative
- longitudes to the 0-360 range and raise an error if the
- longitude is not between -180 and 360.
- """
- x = float(x)
- if (-180 < x) and (x < 0):
- print ("lon is :", x)
- x= x%360
- print ("after modulo lon is :", x)
- if (x < 0) or (x > 360):
- raise argparse.ArgumentTypeError("ERROR: Latitude of single point should be between 0 and 360 or -180 and 180.")
- return x
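-
-# For example, plon_type(-72.2) returns 287.8 (shifted into the 0-360
-# range via modulo), while plon_type(287.8) is returned unchanged.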
-
-def get_git_sha():
- """
- Returns the Git short SHA for the current directory.
- """
- return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode()
-
-class BaseCase :
- """
- Parent class to SinglePointCase and RegionalCase
-
- ...
-
- Attributes
- ----------
- create_domain : bool
- flag for creating domain file
- create_surfdata : bool
- flag for creating surface dataset
- create_landuse : bool
- flag for creating landuse file
- create_datm : bool
- flag for creating DATM files
-
- Methods
- -------
- create_1d_coord(filename, lon_varname , lat_varname,x_dim , y_dim )
- create 1d coordinate variables to enable sel() method
-
- add_tag_to_filename(filename, tag)
- add a tag and timetag to a filename ending with
- [._]cYYMMDD.nc or [._]YYMMDD.nc
- """
- def __init__(self, create_domain, create_surfdata, create_landuse, create_datm):
- self.create_domain = create_domain
- self.create_surfdata = create_surfdata
- self.create_landuse = create_landuse
- self.create_datm = create_datm
-
- def __str__(self):
- return str(self.__class__) + '\n' + '\n'.join((str(item) + ' = ' + str(self.__dict__[item])
- for item in sorted(self.__dict__)))
-
- @staticmethod
- def create_1d_coord(filename, lon_varname , lat_varname , x_dim , y_dim):
- """
- filename : input netCDF file that has 2d lon and lat variables
- lon_varname : variable name that has 2d lon
- lat_varname : variable name that has 2d lat
- x_dim: dimension name in X -- lon
- y_dim: dimension name in Y -- lat
- """
- print( "Open file: "+filename )
- f1 = xr.open_dataset(filename)
-
- # create 1d coordinate variables to enable sel() method
- lon0 = np.asarray(f1[lon_varname][0,:])
- lat0 = np.asarray(f1[lat_varname][:,0])
- lon = xr.DataArray(lon0,name='lon',dims=x_dim,coords={x_dim:lon0})
- lat = xr.DataArray(lat0,name='lat',dims=y_dim,coords={y_dim:lat0})
-
- f2=f1.assign({'lon':lon,'lat':lat})
-
- # reset_coords returns a new dataset, so keep the result
- f2 = f2.reset_coords([lon_varname,lat_varname])
- f1.close()
- return f2
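-
- # A minimal usage sketch (the file name is illustrative):
- # ds = BaseCase.create_1d_coord('domain.lnd.nc', 'xc', 'yc', 'ni', 'nj')
- # point = ds.sel(ni=287.8, nj=42.5, method='nearest')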
-
- @staticmethod
- def add_tag_to_filename(filename, tag):
- """
- Add a tag and replace timetag of a filename
- # Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc
- # Add the tag to just before that ending part
- # and change the ending part to the current time tag
- """
- basename = os.path.basename(filename)
- cend = -10
- if ( basename[cend] == "c" ):
- cend = cend - 1
- if ( (basename[cend] != ".") and (basename[cend] != "_") ):
- print ( "Trouble figuring out where to add tag to filename:"+filename )
- os.abort()
- today = date.today()
- today_string = today.strftime("%y%m%d")
- return( basename[:cend]+"_"+tag+"_c"+today_string +'.nc')
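-
- # For example (the cYYMMDD suffix becomes the current date):
- # add_tag_to_filename('surfdata_0.9x1.25_hist_16pfts_c190214.nc', 'mysite')
- # returns 'surfdata_0.9x1.25_hist_16pfts_mysite_cYYMMDD.nc'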
-
- @staticmethod
- def update_metadata(nc):
- """
- Update the global attributes of a dataset: record who created it,
- when, and with which script, and drop unrelated attributes.
- """
- #update attributes
- today = date.today()
- today_string = today.strftime("%Y-%m-%d")
-
- #get git hash
- sha = get_git_sha()
-
- nc.attrs['Created_on'] = today_string
- nc.attrs['Created_by'] = myname
- nc.attrs['Created_with'] = os.path.abspath(__file__) + " -- "+sha
-
- #delete unrelated attributes if they exist
- del_attrs = ['source_code', 'SVN_url', 'hostname', 'history',
- 'History_Log', 'Logname', 'Host', 'Version',
- 'Compiler_Optimized']
- attr_list = nc.attrs
-
- for attr in del_attrs:
- if attr in attr_list:
- #print ("This attr should be deleted:", attr)
- del(nc.attrs[attr])
-
-
- #for attr, value in attr_list.items():
- # print (attr + " = "+str(value))
-
-
-
-class SinglePointCase (BaseCase):
- """
- A case to encapsulate single point cases.
-
- ...
-
- Attributes
- ----------
- plat : float
- latitude
- plon : float
- longitude
- site_name: str -- default = None
- Site name
-
- Methods
- -------
- create_tag
- create a tag for single point which is the site name
- or the "lon-lat" format if the site name does not exist.
-
- create_domain_at_point
- Create domain file at a single point.
- create_landuse_at_point:
- Create landuse file at a single point.
- create_surfdata_at_point:
- Create surface dataset at a single point.
- create_datmdomain_at_point:
- Create DATM domain file at a single point.
- """
-
- def __init__(self, plat, plon,site_name,
- create_domain, create_surfdata, create_landuse, create_datm,
- overwrite_single_pft, dominant_pft, zero_nonveg_landunits,
- uniform_snowpack, no_saturation_excess):
- super().__init__(create_domain, create_surfdata, create_landuse, create_datm)
- self.plat = plat
- self.plon = plon
- self.site_name = site_name
- self.overwrite_single_pft = overwrite_single_pft
- self.dominant_pft = dominant_pft
- self.zero_nonveg_landunits = zero_nonveg_landunits
- self.uniform_snowpack = uniform_snowpack
- self.no_saturation_excess = no_saturation_excess
-
- def create_tag(self):
- if self.site_name:
- self.tag = self.site_name
- else:
- self.tag=str(self.plon)+'_'+str(self.plat)
-
- @staticmethod
- def create_fileout_name( filename,tag):
-
- basename = os.path.basename(filename)
- items = basename.split('_')
- today = date.today()
- today_string = today.strftime("%y%m%d")
- print (items[-1])
- #new_string = items[0]+"_"+items[2]+"_"+items[3]+"_"+ items[4] \
- # +"_"+items[5]+"_"+items[6]+"_"+tag+"_c"+today_string+".nc"
- new_string = items[0]+"_"+items[2]+"_"+items[3]+"_"+ items[4] \
- +"_"+items[5]+"_"+tag+"_c"+today_string+".nc"
- return new_string
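-
- # For example, with tag 'mysite' (the cYYMMDD suffix becomes the
- # current date):
- # 'surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc'
- # becomes 'surfdata_hist_16pfts_Irrig_CMIP6_mysite_cYYMMDD.nc'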
-
- def create_domain_at_point (self):
- print( "----------------------------------------------------------------------")
- print ("Creating domain file at ", self.plon, self.plat)
- # create 1d coordinate variables to enable sel() method
- f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj')
- # extract gridcell closest to plon/plat
- f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest')
- # expand dimensions
- f3 = f3.expand_dims(['nj','ni'])
-
- #update attributes
- self.update_metadata(f3)
- f3.attrs['Created_from'] = self.fdomain_in
-
- wfile=self.fdomain_out
- f3.to_netcdf(path=wfile, mode='w')
- print('Successfully created file (fdomain_out) :'+self.fdomain_out)
- f2.close(); f3.close()
-
-
- def create_landuse_at_point (self):
- print( "----------------------------------------------------------------------")
- print ("Creating landuse file at ", self.plon, self.plat, ".")
- # create 1d coordinate variables to enable sel() method
- f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat')
- # extract gridcell closest to plon/plat
- f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest')
-
- # expand dimensions
- f3 = f3.expand_dims(['lsmlat','lsmlon'])
- # specify dimension order
- #f3 = f3.transpose('time','lat','lon')
- f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon')
- #f3['YEAR'] = f3['YEAR'].squeeze()
-
- # revert expand dimensions of YEAR
- year = np.squeeze(np.asarray(f3['YEAR']))
- x = xr.DataArray(year, coords={'time':f3['time']}, dims='time', name='YEAR')
- x.attrs['units']='unitless'
- x.attrs['long_name']='Year of PFT data'
- f3['YEAR'] = x
-
- #update attributes
- self.update_metadata(f3)
- f3.attrs['Created_from'] = self.fluse_in
-
- wfile = self.fluse_out
- # mode 'w' overwrites file
- f3.to_netcdf(path=wfile, mode='w')
- print('Successfully created file (fluse_out) :'+self.fluse_out)
- f2.close(); f3.close()
-
- def create_surfdata_at_point(self):
- print( "----------------------------------------------------------------------")
- print ("Creating surface dataset file at ", self.plon, self.plat, ".")
- # create 1d coordinate variables to enable sel() method
- filename = self.fsurf_in
- f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat')
- # extract gridcell closest to plon/plat
- f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest')
- # expand dimensions
- f3 = f3.expand_dims(['lsmlat','lsmlon']).copy(deep=True)
-
- # modify surface data properties
- if self.overwrite_single_pft:
- f3['PCT_NAT_PFT'][:,:,:] = 0
- if (self.dominant_pft <16):
- f3['PCT_NAT_PFT'][:,:,self.dominant_pft] = 100
- if self.zero_nonveg_landunits:
- f3['PCT_NATVEG'][:,:] = 100
- f3['PCT_CROP'][:,:] = 0
- f3['PCT_LAKE'][:,:] = 0.
- f3['PCT_WETLAND'][:,:] = 0.
- f3['PCT_URBAN'][:,:,] = 0.
- f3['PCT_GLACIER'][:,:] = 0.
- if self.uniform_snowpack:
- f3['STD_ELEV'][:,:] = 20.
- if self.no_saturation_excess:
- f3['FMAX'][:,:] = 0.
-
- # specify dimension order
- #f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon')
- f3 = f3.transpose(u'time', u'cft', u'lsmpft', u'natpft', u'nglcec', u'nglcecp1', u'nlevsoi', u'nlevurb', u'numrad', u'numurbl', 'lsmlat', 'lsmlon')
-
- #update lsmlat and lsmlon to match site specific instead of the nearest point
- #f3['lon']= self.plon
- #f3['lat']= self.plat
- f3['lsmlon']= np.atleast_1d(self.plon)
- f3['lsmlat']= np.atleast_1d(self.plat)
- f3['LATIXY'][:,:]= self.plat
- f3['LONGXY'][:,:]= self.plon
-
- #update attributes
- self.update_metadata(f3)
- f3.attrs['Created_from'] = self.fsurf_in
- # update_metadata above may already have removed this attribute
- if 'History_Log' in f3.attrs:
- del f3.attrs['History_Log']
- # mode 'w' overwrites file
- f3.to_netcdf(path=self.fsurf_out, mode='w')
- print('Successfully created file (fsurf_out) :'+self.fsurf_out)
- f2.close(); f3.close()
-
- def create_datmdomain_at_point(self):
- print( "----------------------------------------------------------------------")
- print("Creating DATM domain file at ", self.plon, self.plat, ".")
- # create 1d coordinate variables to enable sel() method
- filename = self.fdatmdomain_in
- f2 = self.create_1d_coord(filename, 'xc','yc','ni','nj')
- # extract gridcell closest to plon/plat
- f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest')
- # expand dimensions
- f3 = f3.expand_dims(['nj','ni'])
- wfile=self.fdatmdomain_out
- #update attributes
- self.update_metadata(f3)
- f3.attrs['Created_from'] = self.fdatmdomain_in
- # mode 'w' overwrites file
- f3.to_netcdf(path=wfile, mode='w')
- print('Successfully created file (fdatmdomain_out) :'+self.fdatmdomain_out)
- f2.close(); f3.close()
-
- def extract_datm_at(self, file_in, file_out):
- # create 1d coordinate variables to enable sel() method
- f2 = self.create_1d_coord(file_in, 'LONGXY','LATIXY','lon','lat')
- # extract gridcell closest to plon/plat
- f3 = f2.sel(lon=self.plon,lat=self.plat,method='nearest')
- # expand dimensions
- f3 = f3.expand_dims(['lat','lon'])
- # specify dimension order
- f3 = f3.transpose(u'scalar','time','lat','lon')
-
- #update attributes
- self.update_metadata(f3)
- f3.attrs['Created_from'] = file_in
- # mode 'w' overwrites file
- f3.to_netcdf(path=file_out, mode='w')
- print('Successfully created file :'+ file_out)
- f2.close(); f3.close()
-
- def create_datm_at_point(self):
- print( "----------------------------------------------------------------------")
- print("Creating DATM files at ", self.plon, self.plat, ".")
- #-- specify subdirectory names and filename prefixes
- solrdir = 'Solar/'
- precdir = 'Precip/'
- tpqwldir = 'TPHWL/'
- prectag = 'clmforc.GSWP3.c2011.0.5x0.5.Prec.'
- solrtag = 'clmforc.GSWP3.c2011.0.5x0.5.Solr.'
- tpqwtag = 'clmforc.GSWP3.c2011.0.5x0.5.TPQWL.'
-
- #-- create data files
- infile=[]
- outfile=[]
- for y in range(self.datm_syr,self.datm_eyr+1):
- ystr=str(y)
- for m in range(1,13):
- mstr=str(m)
- if m < 10:
- mstr='0'+mstr
-
- dtag=ystr+'-'+mstr
-
- fsolar=self.dir_input_datm+solrdir+solrtag+dtag+'.nc'
- fsolar2=self.dir_output_datm+solrtag+self.tag+'.'+dtag+'.nc'
- fprecip=self.dir_input_datm+precdir+prectag+dtag+'.nc'
- fprecip2=self.dir_output_datm+prectag+self.tag+'.'+dtag+'.nc'
- ftpqw=self.dir_input_datm+tpqwldir+tpqwtag+dtag+'.nc'
- ftpqw2=self.dir_output_datm+tpqwtag+self.tag+'.'+dtag+'.nc'
-
- infile+=[fsolar,fprecip,ftpqw]
- outfile+=[fsolar2,fprecip2,ftpqw2]
-
- nm=len(infile)
- for n in range(nm):
- print(outfile[n])
- file_in = infile[n]
- file_out = outfile[n]
- self.extract_datm_at(file_in, file_out)
-
-
- print('All DATM files are created in: '+self.dir_output_datm)
-
-class RegionalCase (BaseCase):
- """
- A case to encapsulate regional cases.
- """
-
- def __init__(self, lat1, lat2, lon1, lon2, reg_name,
- create_domain, create_surfdata, create_landuse, create_datm):
- super().__init__(create_domain, create_surfdata, create_landuse, create_datm)
- self.lat1 = lat1
- self.lat2 = lat2
- self.lon1 = lon1
- self.lon2 = lon2
- self.reg_name = reg_name
-
- def create_tag(self):
- if self.reg_name:
- self.tag = self.reg_name
- else:
- self.tag=str(self.lon1)+'-'+str(self.lon2)+'_'+str(self.lat1)+'-'+str(self.lat2)
-
- def create_domain_at_reg (self):
- #print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2)
- print ("Creating domain file at region:", self.tag)
- # create 1d coordinate variables to enable sel() method
- f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj')
- lat = f2['lat']
- lon = f2['lon']
- # subset longitude and latitude arrays
- xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0]
- yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0]
- f3=f2.isel(nj=yind,ni=xind)
-
- #update attributes
- self.update_metadata(f3)
- f3.attrs['Created_from'] = self.fdomain_in
-
- wfile=self.fdomain_out
- # mode 'w' overwrites file
- f3.to_netcdf(path=wfile, mode='w')
- print('Successfully created file (fdomain_out) :'+self.fdomain_out)
- f2.close(); f3.close()
-
-
- def create_surfdata_at_reg(self):
- #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2)
- print ("Creating surface dataset file at region:", self.tag)
- # create 1d coordinate variables to enable sel() method
- filename = self.fsurf_in
- f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat')
- lat = f2['lat']
- lon = f2['lon']
- # subset longitude and latitude arrays
- xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0]
- yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0]
- f3=f2.isel(lsmlat=yind,lsmlon=xind)
-
- #update attributes
- self.update_metadata(f3)
- f3.attrs['Created_from'] = self.fsurf_in
-
- # mode 'w' overwrites file
- f3.to_netcdf(path=self.fsurf_out, mode='w', format='NETCDF3_64BIT')
- print('Successfully created file (fsurf_out) :'+self.fsurf_out)
- #f1.close();
- f2.close(); f3.close()
-
-
- def create_landuse_at_reg (self):
- #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2)
- print ("Creating surface dataset file at region:",self.tag)
- # create 1d coordinate variables to enable sel() method
- f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat')
- lat = f2['lat']
- lon = f2['lon']
- # subset longitude and latitude arrays
- xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0]
- yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0]
- f3=f2.isel(lsmlat=yind,lsmlon=xind)
-
- #update attributes
- self.update_metadata(f3)
- f3.attrs['Created_from'] = self.fluse_in
-
- wfile=self.fluse_out
- # mode 'w' overwrites file
- f3.to_netcdf(path=wfile, mode='w')
- print('Successfully created file (fluse_out) :'+self.fluse_out)
- f2.close(); f3.close()
-
-
-def setup_logging(log_file, log_level):
- """
- Setup logging to log to console and log file.
- """
-
- root_logger = logging.getLogger()
- root_logger.setLevel(log_level)
-
- # setup log file
- one_mb = 1000000
- handler = logging.handlers.RotatingFileHandler(log_file, maxBytes=one_mb , backupCount=10)
-
- fmt = logging.Formatter(
- '%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
- datefmt='%y-%m-%d %H:%M:%S')
-
- handler.setFormatter(fmt)
- root_logger.addHandler(handler)
-
- # setup logging to console
- stream_handler = logging.StreamHandler(sys.stdout)
- stream_handler.setFormatter(fmt)
- root_logger.addHandler(stream_handler)
-
- # redirect stdout/err to log file
- StreamToLogger.setup_stdout()
- StreamToLogger.setup_stderr()
-
-
-
-class StreamToLogger(object):
- """
- Custom class to log all stdout and stderr streams.
- modified from:
- https://www.electricmonk.nl/log/2011/08/14/redirect-stdout-and-stderr-to-a-logger-in-python/
- """
- def __init__(self, stream, logger, log_level=logging.INFO,
- also_log_to_stream=False):
- self.logger = logger
- self.stream = stream
- self.log_level = log_level
- self.linebuf = ''
- self.also_log_to_stream = also_log_to_stream
-
- @classmethod
- def setup_stdout(cls, also_log_to_stream=True):
- """
- Setup logger for stdout
- """
- stdout_logger = logging.getLogger('STDOUT')
- sl = StreamToLogger(sys.stdout, stdout_logger, logging.INFO, also_log_to_stream)
- sys.stdout = sl
-
- @classmethod
- def setup_stderr(cls, also_log_to_stream=True):
- """
- Setup logger for stderr
- """
- stderr_logger = logging.getLogger('STDERR')
- sl = StreamToLogger(sys.stderr, stderr_logger, logging.ERROR, also_log_to_stream)
- sys.stderr = sl
-
- def write(self, buf):
- temp_linebuf = self.linebuf + buf
- self.linebuf = ''
- for line in temp_linebuf.splitlines(True):
- if line[-1] == '\n':
- self.logger.log(self.log_level, line.rstrip())
- else:
- self.linebuf += line
-
- def flush(self):
- if self.linebuf != '':
- self.logger.log(self.log_level, self.linebuf.rstrip())
- self.linebuf = ''
-
-
-
-
-def main ():
-
- args = get_parser().parse_args()
-
- # --------------------------------- #
-
- today = date.today()
- today_string = today.strftime("%Y%m%d")
-
- pwd = os.getcwd()
-
- log_file = os.path.join(pwd, today_string+'.log')
-
- log_level = logging.DEBUG
- setup_logging(log_file, log_level)
- log = logging.getLogger(__name__)
-
- print("User = "+myname)
- print("Current directory = "+pwd)
-
- # --------------------------------- #
-
- if (args.run_type == "point"):
- print( "----------------------------------------------------------------------------")
- print( "This script extracts a single point from the global CTSM inputdata datasets." )
-
- #-- Specify point to extract
- plon = args.plon
- plat = args.plat
-
- #-- Create regional CLM domain file
- create_domain = args.create_domain
- #-- Create CLM surface data file
- create_surfdata = args.create_surfdata
- #-- Create CLM landuse data file
- create_landuse = args.create_landuse
- #-- Create single point DATM atmospheric forcing data
- create_datm = args.create_datm
- datm_syr = args.datm_syr
- datm_eyr = args.datm_eyr
-
- crop_flag = args.crop_flag
-
- site_name = args.site_name
-
- #-- Modify landunit structure
- overwrite_single_pft = args.overwrite_single_pft
- dominant_pft = args.dom_pft
- zero_nonveg_landunits= args.zero_nonveg
- uniform_snowpack = args.uni_snow
- no_saturation_excess = args.no_saturation_excess
-
-
- #-- Create SinglePoint Object
- single_point = SinglePointCase(plat, plon,site_name,
- create_domain, create_surfdata, create_landuse, create_datm,
- overwrite_single_pft, dominant_pft, zero_nonveg_landunits, uniform_snowpack,
- no_saturation_excess)
- single_point.create_tag()
-
-
- print (single_point)
-
- if crop_flag:
- num_pft = "78"
- else:
- num_pft = "16"
-
- print('crop_flag = '+ str(crop_flag) + ' => num_pft ='+ num_pft)
-
- #-- Set input and output filenames
- #-- Specify input and output directories
- dir_output = args.out_dir
- if ( not os.path.isdir( dir_output ) ):
- os.mkdir( dir_output )
-
- dir_inputdata='/glade/p/cesmdata/cseg/inputdata/'
- dir_clm_forcedata='/glade/p/cgd/tss/CTSM_datm_forcing_data/'
- dir_input_datm=os.path.join(dir_clm_forcedata,'atm_forcing.datm7.GSWP3.0.5d.v1.c170516/')
- dir_output_datm=os.path.join(dir_output , 'datmdata/')
- if ( not os.path.isdir( dir_output_datm ) ):
- os.mkdir( dir_output_datm )
-
- print ("dir_input_datm : ", dir_input_datm) #
- print ("dir_output_datm : ", dir_output_datm) #
-
-
- #-- Set time stamp
- today = date.today()
- timetag = today.strftime("%y%m%d")
-
- #-- Specify land domain file ---------------------------------
- fdomain_in = os.path.join(dir_inputdata,'share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc')
- fdomain_out = dir_output + single_point.add_tag_to_filename( fdomain_in, single_point.tag )
- single_point.fdomain_in = fdomain_in
- single_point.fdomain_out = fdomain_out
- print ("fdomain_in :",fdomain_in) #
- print ("fdomain_out :",fdomain_out) #
-
- #-- Specify surface data file --------------------------------
- if crop_flag:
- fsurf_in = os.path.join (dir_inputdata, 'lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc')
- else:
- fsurf_in = os.path.join (dir_inputdata, 'lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc')
-
- #fsurf_out = dir_output + single_point.add_tag_to_filename(fsurf_in, single_point.tag) # remove res from filename for singlept
- fsurf_out = dir_output + single_point.create_fileout_name(fsurf_in, single_point.tag)
- single_point.fsurf_in = fsurf_in
- single_point.fsurf_out = fsurf_out
- print ("fsurf_in :",fsurf_in) #
- print ("fsurf_out :",fsurf_out) #
-
- #-- Specify landuse file -------------------------------------
- # 78-PFT landuse goes with the crop surface dataset; 16-PFT with the non-crop one
- if crop_flag:
- fluse_in = os.path.join (dir_inputdata,'lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc')
- else:
- fluse_in = os.path.join (dir_inputdata,'lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc')
- #fluse_out = dir_output + single_point.add_tag_to_filename( fluse_in, single_point.tag ) # remove resolution from filename for singlept cases
- fluse_out = dir_output + single_point.create_fileout_name(fluse_in, single_point.tag)
- single_point.fluse_in = fluse_in
- single_point.fluse_out = fluse_out
- print ("fluse_in :", fluse_in) #
- print ("fluse_out :", fluse_out) #
-
- #-- Specify datm domain file ---------------------------------
- fdatmdomain_in = os.path.join (dir_clm_forcedata,'atm_forcing.datm7.GSWP3.0.5d.v1.c170516/domain.lnd.360x720_gswp3.0v1.c170606.nc')
- fdatmdomain_out = dir_output_datm+single_point.add_tag_to_filename( fdatmdomain_in, single_point.tag )
- single_point.fdatmdomain_in = fdatmdomain_in
- single_point.fdatmdomain_out = fdatmdomain_out
- print ("fdatmdomain_in : ", fdatmdomain_in) #
- print ("fdatmdomain out : ", fdatmdomain_out) #
-
- #-- Create CTSM domain file
- if create_domain:
- single_point.create_domain_at_point()
-
- #-- Create CTSM surface data file
- if create_surfdata:
- single_point.create_surfdata_at_point()
-
- #-- Create CTSM transient landuse data file
- if create_landuse:
- single_point.create_landuse_at_point()
-
- #-- Create single point atmospheric forcing data
- if create_datm:
- single_point.create_datmdomain_at_point()
- single_point.datm_syr =datm_syr
- single_point.datm_eyr =datm_eyr
- single_point.dir_input_datm = dir_input_datm
- single_point.dir_output_datm = dir_output_datm
- single_point.create_datm_at_point()
-
- print( "Successfully ran script for single point." )
- sys.exit(0)
-
- elif (args.run_type == "reg"):
- print ("Running the script for the region")
- #-- Specify region to extract
- lat1 = args.lat1
- lat2 = args.lat2
-
- lon1 = args.lon1
- lon2 = args.lon2
-
- #-- Create regional CLM domain file
- create_domain = args.create_domain
- #-- Create CLM surface data file
- create_surfdata = args.create_surfdata
- #-- Create CLM landuse data file
- create_landuse = args.create_landuse
- #-- Create DATM atmospheric forcing data
- create_datm = args.create_datm
-
- crop_flag = args.crop_flag
-
- reg_name = args.reg_name
-
- region = RegionalCase(lat1, lat2, lon1, lon2, reg_name, create_domain, create_surfdata, create_landuse, create_datm)
-
- print (region)
-
- if crop_flag:
- num_pft = "78"
- else:
- num_pft = "16"
-
-
- print('crop_flag = '+ str(crop_flag) + ' => num_pft ='+ num_pft)
-
-
- region.create_tag()
-
- #-- Set input and output filenames
- #-- Specify input and output directories
- dir_output = args.out_dir
- if ( not os.path.isdir( dir_output ) ):
- os.mkdir( dir_output )
-
- dir_inputdata='/glade/p/cesmdata/cseg/inputdata/'
- dir_clm_forcedata='/glade/p/cgd/tss/CTSM_datm_forcing_data/'
-
- #-- Set time stamp
- today = date.today()
- timetag = today.strftime("%y%m%d")
- print (timetag)
-
- #-- Specify land domain file ---------------------------------
- fdomain_in = dir_inputdata+'share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc'
- fdomain_out = dir_output + 'domain.lnd.fv1.9x2.5_gx1v7.'+region.tag+'_170518.nc'
- #SinglePointCase.set_fdomain (fdomain)
- region.fdomain_in = fdomain_in
- region.fdomain_out = fdomain_out
-
- #-- Specify surface data file --------------------------------
- fsurf_in = dir_inputdata+'lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc'
- fsurf_out = dir_output + 'surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_'+region.tag+'_c170824.nc'
- region.fsurf_in = fsurf_in
- region.fsurf_out = fsurf_out
-
- #-- Specify landuse file -------------------------------------
- fluse_in = dir_inputdata+'lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc'
- fluse_out = dir_output + 'landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_'+region.tag+'.c170824.nc'
- region.fluse_in = fluse_in
- region.fluse_out = fluse_out
-
- #-- Create CTSM domain file
- if create_domain:
- region.create_domain_at_reg()
-
- #-- Create CTSM surface data file
- if create_surfdata:
- region.create_surfdata_at_reg()
-
- #-- Create CTSM transient landuse data file
- if create_landuse:
- region.create_landuse_at_reg()
- print( "Successfully ran script for a regional case." )
-
- else :
- # print help when no option is chosen
- get_parser().print_help()
-
-if __name__ == "__main__":
- main()