diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index b0bf2c5a0c..fca4a8315b 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -51,3 +51,4 @@ aa04d1f7d86cc2503b98b7e2b2d84dbfff6c316b
1a49e547ba3c48fa483f9ae81a8f05adcd6b888c
045d90f1d80f713eb3ae0ac58f6c2352937f1eb0
753fda3ff0147837231a73c9c728dd9ce47b5997
+f112ba0bbf96a61d5a4d354dc0dcbd8b0c68145c
diff --git a/.gitignore b/.gitignore
index 27823e7f54..e24a481063 100644
--- a/.gitignore
+++ b/.gitignore
@@ -85,9 +85,6 @@ ctsm.input_data_list.previous
# mksurfdata unit tests
unit_test_build
-# Tools executables
-/tools/mkprocdata_map/mkprocdata_map
-
# run_neon output directories
/tools/site_and_regional/listing.csv
/tools/site_and_regional/????/
diff --git a/README b/README
index 8b4e15e557..c5c06daae7 100644
--- a/README
+++ b/README
@@ -1,6 +1,6 @@
-$CTSMROOT/README 04/19/2023
+$CTSMROOT/README 09/05/2024
-Community Terrestrial Systems Model (CTSM) science version 5.2 series -- source code, tools,
+Community Terrestrial Systems Model (CTSM) science version 5.3 series -- source code, tools,
offline-build and test scripts. This gives you everything you need
to run CTSM within CESM, using the CMEPS driver and CDEPS data models to provide
CRU NCEP or GSWP3 forcing data in place of a modeled atmosphere.
@@ -51,7 +51,7 @@ tools ------------- CTSM Offline tools to prepare input datasets and process out
cime_config ------- Configuration files of cime for compsets and CTSM settings
bin/git-fleximod -- Script to manage the needed sub-component source directories (handled with git submodule)
py_env_create ----- Script to setup the python environment for CTSM python tools using conda
-python ------------ Python modules used in tools and testing and automated checking of ALL CTSM python scirpts
+python ------------ Python modules used in tools and testing and automated checking of ALL CTSM python scripts
Directory structure only for a CTSM checkout:
@@ -66,6 +66,8 @@ components/cmeps -------------------- CESM top level driver (for NUOPC driver [w
components/cdeps -------------------- CESM top level data model shared code (for NUOPC driver).
components/cism --------------------- CESM Community land Ice Sheet Model.
components/mosart ------------------- Model for Scale Adaptive River Transport
+components/mizuRoute ---------------- Reach-based river transport model for water routing
+                                      (allows both gridded river and Hydrologic Response Unit river grids)
components/rtm ---------------------- CESM River Transport Model.
Top level documentation ($CTSMROOT):
@@ -74,6 +76,7 @@ README ------------------- This file
README.md ---------------- File that displays on github under https://github.com/ESCOMP/CTSM.git
README.rst --------------- File that displays under the project in github
README_GITFLEXIMOD.rst --- Information on how to work with git-fleximod for CTSM
+WhatsNewInCTSM5.3.md ----- Overview document of the changes between ctsm5.2.0 and ctsm5.3.0
CODE_OF_CONDUCT.md ------- Code of Conduct for how to work with each other on the CTSM project
Copyright ---------------- CESM Copyright file
doc/UpdateChangeLog.pl --- Script to add documentation on a tag to the
@@ -104,17 +107,16 @@ run_sys_tests --------------- Python script to send the standard CTSM testing of
parse_cime.cs.status -------- Script to parse test status files cs.status.* created by create_test
(can be used along with run_sys_tests)
doc/Quickstart.GUIDE -------- Quick guide to using NUOPC scripts.
-doc/IMPORTANT_NOTES --------- Some important notes about this version of
- CTSM, configuration modes and namelist items
- that are not validated or functional.
+doc/IMPORTANT_NOTES.md ------ Some important notes about this version of
+ CTSM, configuration modes and namelist items
+ that are not validated or functional.
doc/ChangeLog --------------- Detailed list of changes for each model version.
doc/ChangeSum --------------- Summary one-line list of changes for each
model version.
doc/UsersGuide -------------- CTSM Users Guide
-doc/IMPORTANT_NOTES --------- Some important notes on caveats for some configurations/namelist items
bld/README ------------------ Description of how to use the build-namelist scripts.
-bld/build-namelist ---------- Script to build CTSM namelists.
+bld/build-namelist ---------- Lower level script to build CTSM namelists.
cime_config/buildnml ------------- Build the CTSM namelist for CIME
cime_config/buildlib ------------- Build the CTSM library
@@ -130,8 +132,6 @@ cime_config/usermods_dirs -------- Directories of sets of user-modification subd
tools/mksurfdata_esmf --------- Directory to build program to create surface dataset
at any resolution.
-tools/mkprocdata_map ---------- Process history data from unstructed grids to a gridded
- format.
tools/mkmapgrids -------------- NCL script to create a SCRIP grid file for a regular lat/lon grid (deprecated)
tools/crop_calendars ---------- Tools to process and create crop calendar datasets for CTSM
tools/modify_input_files ------ Script to modify existing CTSM input datasets in standard ways
@@ -155,9 +155,9 @@ src/dyn_subgrid --- Dynamic land unit change
src/init_interp --- Online interpolation
src/fates --------- FATES model and sub-directories
Functionally Assembled Terrestrial Ecosystem Simulator (FATES)
- Experimental Ecosystem Demography model
+ Ecosystem Demography model
src/utils --------- Utility codes
-src/self_tests ---- Internal testing (unit tests run as a part of a CTSM simulation)
+src/self_tests ---- Internal testing (unit tests run as a part of a CTSM system test)
src/unit_test_shr - Unit test shared modules for unit testing
src/unit_test_stubs Unit test stubs that replicate CTSM code in a simpler form
diff --git a/README.md b/README.md
index 045af9f6a1..5e800a0b77 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@
The Community Terrestrial Systems Model.
-This includes the Community Land Model (CLM5.0 and CLM4.5) of the Community Earth System Model.
+This includes the Community Land Model of the Community Earth System Model.
For documentation, quick start, diagnostics, model output and
references, see
@@ -43,7 +43,7 @@ CTSM code management is provided primarily by:
Software engineering team:
- [Erik Kluzek](https://github.com/ekluzek)
- [Bill Sacks](https://github.com/billsacks)
-- [Sam Levis](https://github.com/slevisconsulting)
+- [Sam Levis](https://github.com/slevis-lmwg)
- [Adrianna Foster](https://github.com/adrifoster)
- [Sam Rabin](https://github.com/samsrabin)
- [Greg Lemieux](https://github.com/glemieux)
diff --git a/README_GITFLEXIMOD.rst b/README_GITFLEXIMOD.rst
index de6bbf392f..d1ab767645 100644
--- a/README_GITFLEXIMOD.rst
+++ b/README_GITFLEXIMOD.rst
@@ -67,10 +67,10 @@ Switching to a different CTSM branch or tag
If you have already checked out a branch or tag and **HAVE NOT MADE ANY
MODIFICATIONS** it is simple to change your sandbox. Say that you
-checked out ctsm1.0.0 but really wanted to have ctsm1.1.0;
+checked out ctsm5.2.0 but really wanted to have ctsm5.3.0;
you would simply do the following::
- git checkout ctsm1.1.0
+ git checkout ctsm5.3.0
./bin/git-fleximod update
You should **not** use this method if you have made any source code
diff --git a/WhatsNewInCTSM5.3.md b/WhatsNewInCTSM5.3.md
new file mode 100644
index 0000000000..b1f753081b
--- /dev/null
+++ b/WhatsNewInCTSM5.3.md
@@ -0,0 +1,53 @@
+Purpose and description of changes since ctsm5.2.005
+----------------------------------------------------
+
+Bring in updates needed for the CESM3.0 science capability/functionality "chill". Most importantly this brings
+in: CN Matrix to speed up spinup for the BGC model, updated surface datasets, updated Leung 2023 dust emissions,
+explicit air conditioning for the urban model, and updates to crop calendars. For clm6_0 physics these options are
+now turned on by default, in addition to Sturm snow conductivity and excess ice.
+
+Changes to CTSM Infrastructure:
+===============================
+
+ - manage_externals removed and replaced by git-fleximod
+ - Ability to handle CAM7 in LND_TUNING_MODE
+
+Changes to CTSM Answers:
+========================
+
+ Changes to defaults for clm6_0 physics:
+ - Urban explicit A/C turned on
+ - Snow thermal conductivity is now Sturm_1997
+ - New IC file for f09 1850
+ - New crop calendars
+ - Dust emissions is now Leung_2023
+ - Excess ice is turned on
+ - Updates to MEGAN for BVOCs
+ - Updates to BGC fire method
+
+ Changes for all physics versions:
+
+ - Parameter files updated
+ - FATES parameter file updated
+ - Glacier region 1 is now undefined
+ - Updates to FATES transient land use
+ - Pass active glacier (CISM) runoff directly to river model (MOSART)
+ - Add the option for using matrix for Carbon/Nitrogen BGC spinup
+
+New surface datasets:
+=====================
+
+- With new surface datasets the following GLC fields have region "1" set to UNSET:
+ glacier_region_behavior, glacier_region_melt_behavior, glacier_region_ice_runoff_behavior
+- Updates to allow creating transient landuse timeseries files going back to 1700.
+- Fix an important bug in soil fields that had been present since ctsm5.2.0. As a result, mksurfdata_esmf now gives identical answers when the number of processors changes, as it should.
+- Add in creation of ne0np4.POLARCAP.ne30x4 surface datasets.
+- Add version to the surface datasets.
+- Remove the --hires_pft option from mksurfdata_esmf as we don't have the datasets for it.
+- Remove VIC fields from surface datasets.
+
+New input datasets to mksurfdata_esmf:
+======================================
+
+- Updates in PFT/LAI/soil-color raw datasets (now from the TRENDY2024 timeseries that ends in 2023), as well as two fire datasets (AG fire, peatland), and the glacier behavior dataset.
+
diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm
index a5b19dd62b..b881cdbfac 100755
--- a/bld/CLMBuildNamelist.pm
+++ b/bld/CLMBuildNamelist.pm
@@ -1665,6 +1665,7 @@ sub process_namelist_inline_logic {
setup_logic_demand($opts, $nl_flags, $definition, $defaults, $nl);
setup_logic_surface_dataset($opts, $nl_flags, $definition, $defaults, $nl, $envxml_ref);
setup_logic_dynamic_subgrid($opts, $nl_flags, $definition, $defaults, $nl);
+ setup_logic_exice($opts, $nl_flags, $definition, $defaults, $nl, $physv);
if ( remove_leading_and_trailing_quotes($nl_flags->{'clm_start_type'}) ne "branch" ) {
setup_logic_initial_conditions($opts, $nl_flags, $definition, $defaults, $nl, $physv);
}
@@ -1894,7 +1895,7 @@ sub process_namelist_inline_logic {
#################################
# namelist group: exice_streams #
#################################
- setup_logic_exice($opts, $nl_flags, $definition, $defaults, $nl, $physv);
+ setup_logic_exice_streams($opts, $nl_flags, $definition, $defaults, $nl, $physv);
##########################################
# namelist group: clm_temperature_inparm #
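The call moved above and the rename below it encode an ordering constraint: setup_logic_exice must run before the initial-conditions logic (which now matches finidat files on use_excess_ice), while the streams decision has to wait until after a finidat file has been chosen. A minimal stand-alone sketch of that three-stage ordering, with hypothetical names rather than the real build-namelist code:

```perl
#!/usr/bin/perl
# Sketch only: (1) decide use_excess_ice, (2) pick finidat using that flag
# and record whether the chosen file carries excess-ice state, (3) decide
# whether excess-ice streams are needed.
use strict;
use warnings;

my %nl_flags;

sub setup_exice {
    $nl_flags{'use_excess_ice'} = '.true.';
}

sub setup_initial_conditions {
    # The finidat search would use $nl_flags{'use_excess_ice'} as a match key
    # and record what kind of file it actually found:
    $nl_flags{'excess_ice_on_finidat'} = '.false.';
}

sub setup_exice_streams {
    my $need = $nl_flags{'use_excess_ice'} eq '.true.'
            && $nl_flags{'excess_ice_on_finidat'} ne '.true.';
    print 'use_excess_ice_streams = ', $need ? '.true.' : '.false.', "\n";
}

setup_exice();               # must come first
setup_initial_conditions();  # consumes use_excess_ice, sets excess_ice_on_finidat
setup_exice_streams();       # prints: use_excess_ice_streams = .true.
```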
@@ -2506,8 +2507,9 @@ sub setup_logic_surface_dataset {
# consistent with it
# MUST BE AFTER: setup_logic_demand which is where flanduse_timeseries is set
#
- my ($opts, $nl_flags, $definition, $defaults, $nl, $xmlvar_ref) = @_;
+ my ($opts_in, $nl_flags, $definition, $defaults, $nl, $xmlvar_ref) = @_;
+ my $opts = $opts_in;
$nl_flags->{'flanduse_timeseries'} = "null";
my $flanduse_timeseries = $nl->get_value('flanduse_timeseries');
if (defined($flanduse_timeseries)) {
@@ -2523,6 +2525,11 @@ sub setup_logic_surface_dataset {
if ($flanduse_timeseries ne "null" && &value_is_true($nl_flags->{'use_cndv'}) ) {
$log->fatal_error( "dynamic PFT's (setting flanduse_timeseries) are incompatible with dynamic vegetation (use_cndv=.true)." );
}
+ # Turn test option off for NEON until after XML is interpreted
+ my $test_files = $opts->{'test'};
+ if ( &value_is_true($nl_flags->{'neon'})) {
+ $opts->{'test'} = 0;
+ }
#
# Always get the crop version of the datasets now and let the code turn it into the form desired
# Provided this isn't with FATES on
@@ -2548,7 +2555,7 @@ sub setup_logic_surface_dataset {
'use_crop'=>$nl_flags->{'use_crop'} );
}
#
- # Expand the XML variables for NEON cases so that NEONSITE will be used
+ # Expand the XML variables for NEON cases so that NEONSITE will be used and test for existence
#
if ( &value_is_true($nl_flags->{'neon'}) ) {
my $fsurdat = $nl->get_value($var);
@@ -2557,6 +2564,9 @@ sub setup_logic_surface_dataset {
my $group = $definition->get_group_name($var);
$nl->set_variable_value($group, $var, $newval);
$log->verbose_message( "This is a NEON site and the fsurdat file selected is: $newval" );
+ if ( $test_files and ($newval !~ /null|none/) and (! -f remove_leading_and_trailing_quotes($newval) ) ) {
+ $log->fatal_error("file not found: $var = $newval");
+ }
}
}
}
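The NEON handling above follows a capture/suppress/re-check pattern: the test option is remembered, suppressed while the default fsurdat is resolved with an unexpanded ${NEONSITE} value, then applied by hand once the XML variable has been expanded. A rough sketch of that pattern under those assumptions (the file name and the expansion step are made up):

```perl
use strict;
use warnings;

my $opts = { 'test' => 1 };

my $test_files = $opts->{'test'};   # remember what the user asked for
$opts->{'test'} = 0;                # the generic lookup must not test the raw value

# Pretend default lookup and XML expansion (hypothetical path):
my $raw = 'lnd/clm2/surfdata_esmf/surfdata_${NEONSITE}.nc';
(my $expanded = $raw) =~ s/\$\{NEONSITE\}/ABBY/;

# Re-apply the existence test by hand on the expanded value:
if ( $test_files and ($expanded !~ /null|none/) and (! -f $expanded) ) {
    warn "file not found: fsurdat = $expanded\n";
}
```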
@@ -2571,10 +2581,12 @@ sub setup_logic_initial_conditions {
#
# MUST BE AFTER: setup_logic_demand which is where flanduse_timeseries is set
# AFTER: setup_logic_irrigate which is where irrigate is set
+ # AFTER: setup_logic_exice which is where use_excess_ice is set
my ($opts, $nl_flags, $definition, $defaults, $nl, $physv) = @_;
my $var = "finidat";
my $finidat = $nl->get_value($var);
+ $nl_flags->{'excess_ice_on_finidat'} = "unknown";
if ( $nl_flags->{'clm_start_type'} =~ /cold/ ) {
if (defined $finidat ) {
$log->warning("setting $var (either explicitly in your user_nl_clm or by doing a hybrid or branch RUN_TYPE)\n is incomptable with using a cold start" .
@@ -2623,7 +2635,7 @@ sub setup_logic_initial_conditions {
$settings{'sim_year'} = $st_year;
}
foreach my $item ( "mask", "maxpft", "irrigate", "glc_nec", "use_crop", "use_cn", "use_cndv",
- "use_fates",
+ "use_fates", "use_excess_ice",
"lnd_tuning_mode",
) {
$settings{$item} = $nl_flags->{$item};
@@ -2644,6 +2656,7 @@ sub setup_logic_initial_conditions {
my $done = 2;
do {
$try++;
+ $nl_flags->{'excess_ice_on_finidat'} = $settings{'use_excess_ice'};
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, %settings );
# If couldn't find a matching finidat file, check if can turn on interpolation and try to find one again
$finidat = $nl->get_value($var);
@@ -2743,6 +2756,13 @@ SIMYR: foreach my $sim_yr ( @sim_years ) {
$log->fatal_error("$useinitvar is being set for you but a $var was not found, so $useinitvar, init_interp_attributes, and finidat must not be set correctly for this configuration in the namelist_default file" );
}
}
+
+  # This check has to be here and not earlier, since use_init_interp is set here and hillslope is already set above in setup_logic_hillslope
+  if ( &value_is_true($nl->get_value($useinitvar)) && value_is_true($nl->get_value("use_hillslope")) ) {
+     $log->warning("WARNING: You have set use_hillslope while $useinitvar is TRUE.\n This means all hillslope columns in a gridcell will read identical values" .
+                   " from initial conditions. If you are sure you want this behaviour:");
+  }
+
} # end initial conditions
#-------------------------------------------------------------------------------
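Both of the checks in this subroutine lean on value_is_true to compare Fortran-style logical strings. The real helper lives elsewhere in CLMBuildNamelist.pm; below is a rough stand-in (an assumption, not the actual implementation) showing the kind of comparison involved:

```perl
use strict;
use warnings;

# Rough stand-in for CLMBuildNamelist.pm's value_is_true; the real helper may
# accept more spellings, but the idea is normalizing Fortran-style logicals.
sub value_is_true {
    my ($val) = @_;
    return 0 unless defined $val;
    return ($val =~ /^\s*\.?true\.?\s*$/i) ? 1 : 0;
}

print value_is_true('.true.'),  "\n";   # 1
print value_is_true('.false.'), "\n";   # 0
print value_is_true(undef),     "\n";   # 0
```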
@@ -4957,18 +4977,39 @@ sub setup_logic_cnmatrix {
#-------------------------------------------------------------------------------
sub setup_logic_exice {
#
- # excess ice streams
+  # excess ice, must be set before initial conditions
#
my ($opts, $nl_flags, $definition, $defaults, $nl, $physv) = @_;
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_excess_ice', 'phys'=>$physv->as_string());
my $use_exice = $nl->get_value( 'use_excess_ice' );
+  # Put use_exice into nl_flags so it can be referenced later
+ if ( value_is_true($use_exice) ) {
+ $nl_flags->{'use_excess_ice'} = ".true.";
+ } else {
+ $nl_flags->{'use_excess_ice'} = ".false.";
+ }
+}
+
+#-------------------------------------------------------------------------------
+sub setup_logic_exice_streams {
+ #
+ # excess ice streams
+  # Run after initial conditions are found, as well as after setup_logic_exice
+ #
+ my ($opts, $nl_flags, $definition, $defaults, $nl, $physv) = @_;
+ my $use_exice = $nl_flags->{'use_excess_ice'};
+ my $excess_ice_on_finidat = $nl_flags->{'excess_ice_on_finidat'};
my $use_exice_streams = $nl->get_value( 'use_excess_ice_streams' );
my $finidat = $nl->get_value('finidat');
# If coldstart and use_excess_ice is on:
if ( ( (not defined($use_exice_streams)) && value_is_true($use_exice) ) && string_is_undef_or_empty($finidat) ) {
$nl->set_variable_value('exice_streams', 'use_excess_ice_streams' , '.true.');
$use_exice_streams = '.true.';
- # if excess ice is turned off
+  # If a finidat file was selected and use_excess_ice is on:
+ } elsif ( (not defined($use_exice_streams)) && value_is_true($use_exice) && (not value_is_true($excess_ice_on_finidat)) ) {
+ $nl->set_variable_value('exice_streams', 'use_excess_ice_streams' , '.true.');
+ $use_exice_streams = '.true.';
+ # if excess ice is turned off
} elsif ( (not defined($use_exice_streams)) && (not value_is_true($use_exice)) ) {
$use_exice_streams = '.false.';
# Checking for cold clm_start_type and not finidat here since finidat can be not set in branch/hybrid runs and
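Taken together with setup_logic_exice above, the streams default now reduces to a small decision table: streams are needed whenever excess ice is on but no excess-ice state can come from the finidat file. A stand-alone sketch of that table (a hypothetical function, not the CTSM code):

```perl
use strict;
use warnings;

# Decision table for the use_excess_ice_streams default, as a sketch:
# streams on whenever excess ice is enabled but the finidat file (if any)
# does not already carry excess-ice state.
sub exice_streams_default {
    my ($use_exice, $exice_on_finidat, $finidat) = @_;
    return '.false.' unless $use_exice eq '.true.';           # excess ice off
    return '.true.'  if !defined($finidat) || $finidat eq ''; # cold start
    return '.true.'  if $exice_on_finidat ne '.true.';        # finidat lacks it
    return '.false.';                                         # finidat has it
}

print exice_streams_default('.true.',  'unknown', undef),          "\n"; # .true.
print exice_streams_default('.true.',  '.false.', 'clmi.file.nc'), "\n"; # .true.
print exice_streams_default('.true.',  '.true.',  'clmi.file.nc'), "\n"; # .false.
print exice_streams_default('.false.', 'unknown', 'clmi.file.nc'), "\n"; # .false.
```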
diff --git a/bld/README b/bld/README
index 1e9517b189..feb0b8495c 100644
--- a/bld/README
+++ b/bld/README
@@ -4,16 +4,16 @@ CLM build and configure directory and scripts. Scripts to help
you prepare to build CLM as a component within CESM, and setup
a namelist for it.
+This is a lower level script called from within CESM/CIME.
+
Important files/directories:
---------- Configure and build scripts
---------- (These scripts are also used by the cesm/cime scripts)
+--------- Namelist build scripts
config_files/clm_phys_vers.pm ------------- Perl module to handle different CLM versions
-config_files/config_definition_ctsm.xml --- XML file defining all CTSM configuration items
+config_files/config_definition_ctsm.xml --- XML file defining CTSM configuration items (mainly physics version)
--------- Scripts to build the namelists
---------- (These scripts are also used by the cesm/cime scripts)
build-namelist --- Build the namelists needed
@@ -26,17 +26,17 @@ unit_testers --- Directory of scripts to test scripts in this directory
---------- XML Files describing namelists in namelist_files
namelist_files/namelist_defaults_ctsm.xml --------- List of default values for the ctsm namelist
namelist_files/namelist_defaults_overall.xml ------ List of default values for overall settings
-namelist_files/namelist_defaults_usr_files.xml ---- List of default values for the user-files
-namelist_files/namelist_definition_ctsm.xml -------- Definition of all namelist items for ctsm
+namelist_files/namelist_defaults_usr_files.xml ---- List of default values for the user-files (deprecated)
+namelist_files/namelist_definition_ctsm.xml ------- Definition of all namelist items for ctsm
namelist_files/namelist_definition.xsl ------------ Describes how to view the xml file as html
-namelist_files/namelist_defaults_drydep.xml ------- List of default values for the dry deposition module.
namelist_files/use_cases -------------------------- Specific configurations that build-namelist uses
namelist_files/use_cases/README ------------------- File explaining the naming convention for use_cases
---------- Driver namelist files, duplicated information from cime/driver/cime_config
namelist_files/namelist_defaults_drv.xml ---------- List of default values for driver namelist defaults
-namelist_files/namelist_defaults_drydep.xml ------- List of default values for dry deposition fields
+namelist_files/namelist_defaults_drydep.xml ------- List of default values for dry deposition and MEGAN fields
namelist_files/namelist_defaults_fire_emis.xml ---- List of default values for fire emission fields
+namelist_files/namelist_defaults_dust_emis.xml ---- List of default values for the dust emissions module.
namelist_files/namelist_definition_drv.xml -------- Definition of all driver namelist items
namelist_files/namelist_definition_drv_flds.xml --- Definition of additional driver fields namelist items
diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml
index 81ce2704fa..650b21cb8d 100644
--- a/bld/namelist_files/namelist_defaults_ctsm.xml
+++ b/bld/namelist_files/namelist_defaults_ctsm.xml
@@ -238,7 +238,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
.false.
-li2021gswpfrc
+li2024crujra
+li2016crufrc
+li2014qianfrc
@@ -513,30 +513,30 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
-'single_at_atm_topo','virtual','virtual','multiple'
-'single_at_atm_topo','virtual','virtual','virtual'
+'single_at_atm_topo','UNSET','virtual','multiple'
+'single_at_atm_topo','UNSET','virtual','virtual'
-'remains_in_place','replaced_by_ice','replaced_by_ice','replaced_by_ice'
+'remains_in_place','UNSET','replaced_by_ice','replaced_by_ice'
-'melted','melted','remains_ice','remains_ice'
+'melted','UNSET','remains_ice','remains_ice'
-lnd/clm2/paramdata/ctsm60_params.c240814.nc
+lnd/clm2/paramdata/ctsm60_params.c240822.nc
+lnd/clm2/paramdata/ctsm51_params.c240814.nc
+lnd/clm2/paramdata/clm50_params.c240814.nc
+lnd/clm2/paramdata/clm45_params.c240814.nc
@@ -886,232 +886,218 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
-->
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.true. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.true. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.true. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.true. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nex=10 do_transient_pfts=.false. lnd_tuning_mode=clm5_1_GSWP3v1
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nex=10 do_transient_pfts=.false. lnd_tuning_mode=clm5_1_GSWP3v1 use_excess_ice=.true.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nex=10 do_transient_pfts=.false. lnd_tuning_mode=clm6_0_GSWP3v1
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nex=10 do_transient_pfts=.false. lnd_tuning_mode=clm6_0_GSWP3v1 use_excess_ice=.true.
hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
-hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nex=10 do_transient_pfts=.false. lnd_tuning_mode=clm5_1_GSWP3v1 use_excess_ice=.true.
-
-hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nex=10 do_transient_pfts=.false. lnd_tuning_mode=clm6_0_GSWP3v1 use_excess_ice=.true.
-hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
-hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
-hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
-
-
-
-hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
-
-
-
-hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=ne120np4.pg3 maxpft=79 mask=tx0.1v3 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne120np4.pg3 maxpft=79 mask=tx0.1v3 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
-hgrid=ne0np4CONUS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne0np4CONUS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
@@ -1119,183 +1105,173 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
-
-
-
-hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
-
-
-
-hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=ne120np4.pg3 maxpft=79 mask=tx0.1v3 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne120np4.pg3 maxpft=79 mask=tx0.1v3 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
-hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
-hgrid=ne0np4CONUS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+>hgrid=ne0np4CONUS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false.
lnd/clm2/initdata_map/clmi.I1850Clm45BgcGs.0901-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
lnd/clm2/initdata_map/clmi.I1850Clm45BgcCruGs.1101-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
lnd/clm2/initdata_map/clmi.B1850Clm45BgcGs.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
lnd/clm2/initdata_map/clmi.B1850Clm45BgcGs.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
@@ -1333,7 +1309,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.I1850Clm50Sp.0181-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
@@ -1341,36 +1317,36 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_esmf/ctsm5.2/clmi.I1850Clm50BgcCrop-ciso.1366-01-01.0.9x1.25_gx1v7_simyr1850_c240223.nc
+>lnd/clm2/initdata_esmf/ctsm5.3/clmi.interp_from.I1850Clm50BgcCrop-ciso.1366-01-01.0.9x1.25_gx1v7_simyr1850_c240223.nc
lnd/clm2/initdata_esmf/ctsm5.2/clmi.I1850Clm50BgcCropCru-ciso.1526-01-01.0.9x1.25_gx1v7_simyr1850_c240223.nc
lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc
lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc
lnd/clm2/initdata_map/clmi.I1850Clm50SpCru.1706-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
@@ -1379,16 +1355,16 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm5_1" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm6_0" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_esmf/ctsm5.2/clmi.I2000Clm50BgcCrop.2011-01-01.1.9x2.5_gx1v7_gl4_simyr2000_c240223.nc
lnd/clm2/initdata_esmf/ctsm5.2/clmi.I2000Clm50BgcCrop.2011-01-01.1.9x2.5_gx1v7_gl4_simyr2000_c240223.nc
lnd/clm2/initdata_esmf/ctsm5.2/clmi.I2000Clm50BgcCrop.2011-01-01.1.9x2.5_gx1v7_gl4_simyr2000_c240223.nc
+ lnd_tuning_mode="clm5_0_GSWP3v1" use_init_interp=".false."
+>lnd/clm2/initdata_esmf/ctsm5.3/clmi.f19_interp_from.I1850Clm50BgcCrop-ciso.1366-01-01.0.9x1.25_gx1v7_simyr1850_c240223.nc
+
+lnd/clm2/initdata_esmf/ctsm5.3/clmi.f19_interp_from.I1850Clm50BgcCrop-ciso.1366-01-01.0.9x1.25_gx1v7_simyr1850_c240223.nc
lnd/clm2/initdata_esmf/ctsm5.2/clmi.I2000Clm50BgcCrop.2011-01-01.1.9x2.5_gx1v7_gl4_simyr2000_c240223.nc
-
+
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm6_0" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_esmf/ctsm5.2/clmi.I2000Clm50BgcCrop.2011-01-01.1.9x2.5_gx1v7_gl4_simyr2000_c240223.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
@@ -1452,7 +1434,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
@@ -1463,14 +1445,14 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
-->
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
@@ -1478,7 +1460,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
@@ -1486,7 +1468,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
@@ -1494,7 +1476,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
@@ -1502,7 +1484,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc
@@ -1512,25 +1494,25 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm5_1" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm6_0" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
@@ -1539,21 +1521,21 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
@@ -1561,7 +1543,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
@@ -1569,7 +1551,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
@@ -1577,7 +1559,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
@@ -1586,15 +1568,15 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm5_1" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
@@ -1603,21 +1585,21 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
@@ -1625,7 +1607,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
@@ -1633,7 +1615,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
@@ -1641,7 +1623,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
@@ -1650,15 +1632,15 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm6_0" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
@@ -1667,7 +1649,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
@@ -1678,14 +1660,14 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
-->
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
@@ -1693,7 +1675,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
@@ -1701,7 +1683,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
@@ -1709,7 +1691,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
@@ -1717,7 +1699,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc
@@ -1725,23 +1707,23 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm5_1" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm6_0" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
@@ -1750,7 +1732,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
@@ -1758,14 +1740,14 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
@@ -1773,7 +1755,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
@@ -1781,7 +1763,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
@@ -1789,7 +1771,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
@@ -1798,15 +1780,15 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm5_1" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
@@ -1815,7 +1797,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
@@ -1823,14 +1805,14 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
@@ -1838,7 +1820,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
@@ -1846,7 +1828,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
@@ -1854,7 +1836,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
@@ -1863,15 +1845,15 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_esmf/ctsm5.3/ctsm52018_f09_xsiceON_acON_pSASU.clm2.r.0201-01-01-00000.nc
+ phys="clm6_0" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.3/ctsm52026_f09_pSASU.clm2.r.0421-01-01-00000.nc
lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
@@ -1880,7 +1862,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
@@ -1891,158 +1873,154 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_0.9x1.25_hist_2000_16pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_0.9x1.25_hist_2000_16pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1.9x2.5_hist_2000_16pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1.9x2.5_hist_2000_16pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_10x15_hist_2000_16pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_10x15_hist_2000_16pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_4x5_hist_2000_16pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_4x5_hist_2000_16pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_mpasa60_hist_2000_16pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_mpasa60_hist_2000_16pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_mpasa15_hist_2000_16pfts_c240216.nc
-
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_mpasa15_hist_2000_16pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_mpasa3p75_hist_2000_16pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_mpasa3p75_hist_2000_16pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_0.9x1.25_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_0.9x1.25_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_0.9x1.25_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_0.9x1.25_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1.9x2.5_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1.9x2.5_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1.9x2.5_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1.9x2.5_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_10x15_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_10x15_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_10x15_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_10x15_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_4x5_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_4x5_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_4x5_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_4x5_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1x1_brazil_hist_2000_78pfts_c240221.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1x1_brazil_hist_2000_78pfts_c240912.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_5x5_amazon_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_5x5_amazon_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne30np4_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne30np4_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne30np4.pg2_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne30np4.pg2_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne30np4.pg3_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne30np4.pg3_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne16np4.pg3_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne16np4.pg3_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_0.125nldas2_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_0.125nldas2_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_T42_hist_2000_78pfts_c240425.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_T42_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_360x720cru_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_360x720cru_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_C96_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_C96_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1x1_numaIA_hist_2000_78pfts_c240221.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1x1_numaIA_hist_2000_78pfts_c240912.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_mpasa480_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_mpasa480_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_mpasa120_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_mpasa120_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne3np4.pg3_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne3np4.pg3_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne120np4.pg3_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne120np4.pg3_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTICGRIS.ne30x8_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne0np4.ARCTICGRIS.ne30x8_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTIC.ne30x4_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne0np4.ARCTIC.ne30x4_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4CONUS.ne30x8_hist_2000_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne0np4CONUS.ne30x8_hist_2000_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1x1_vancouverCAN_hist_2000_78pfts_c240221.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1x1_vancouverCAN_hist_2000_78pfts_c240912.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1x1_mexicocityMEX_hist_2000_78pfts_c240221.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1x1_mexicocityMEX_hist_2000_78pfts_c240912.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1x1_urbanc_alpha_hist_2000_78pfts_c240221.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/synthetic/surfdata_1x1_urbanc_alpha_synth_hist_2000_78pfts_c240912.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_360x720cru_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_360x720cru_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_0.9x1.25_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_0.9x1.25_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1.9x2.5_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1.9x2.5_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_10x15_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_10x15_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_4x5_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_4x5_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_mpasa480_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_mpasa480_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_mpasa120_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_mpasa120_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne30np4_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne30np4_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne30np4.pg2_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne30np4.pg2_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne30np4.pg3_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne30np4.pg3_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne3np4.pg3_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne3np4.pg3_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_C96_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_C96_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1x1_smallvilleIA_hist_1850_78pfts_c240221.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/synthetic/surfdata_1x1_smallvilleIA_synth_hist_1850_78pfts_c240912.nc
-lnd/clm2/surfdata_esmf/surfdata_1x1_cidadinhoBR_hist_2000_78pfts_c240613.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/synthetic/surfdata_1x1_cidadinhoBR_synth_hist_2000_78pfts_c240912.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1x1_brazil_hist_1850_78pfts_c240221.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1x1_brazil_hist_1850_78pfts_c240912.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne3np4.pg3_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne3np4.pg3_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne16np4.pg3_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne16np4.pg3_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne120np4.pg3_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne120np4.pg3_hist_1850_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTICGRIS.ne30x8_hist_1979_78pfts_c240425.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne0np4.ARCTICGRIS.ne30x8_hist_1979_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTIC.ne30x4_hist_1979_78pfts_c240425.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne0np4.ARCTIC.ne30x4_hist_1979_78pfts_c240908.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4CONUS.ne30x8_hist_1979_78pfts_c240425.nc
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne0np4CONUS.ne30x8_hist_1979_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_0.9x1.25_PtVeg_nourb_1850_16pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_0.9x1.25_PtVeg_nourb_1850_16pfts_c240908.nc
-lnd/clm2/surfdata_esmf/NEON/16PFT_mixed/surfdata_1x1_NEON_${NEONSITE}_hist_2000_16pfts_c240206.nc
+lnd/clm2/surfdata_esmf/NEON/ctsm5.3.0/16PFT_mixed/surfdata_1x1_NEON_${NEONSITE}_hist_2000_16pfts_c240912.nc
-lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_${NEONSITE}_hist_2000_78pfts_c240206.nc
+lnd/clm2/surfdata_esmf/NEON/ctsm5.3.0/surfdata_1x1_NEON_${NEONSITE}_hist_2000_78pfts_c240912.nc
@@ -2050,45 +2028,45 @@ lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_TOOL_hist_78pfts_CMIP6_simyr2000_c
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240216.nc
+ >lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_1.9x2.5_SSP2-4.5_1850-2100_78pfts_c240216.nc
+ >lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_1.9x2.5_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_4x5_SSP2-4.5_1850-2100_78pfts_c240216.nc
+ >lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_4x5_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_10x15_SSP2-4.5_1850-2100_78pfts_c240216.nc
+ >lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_10x15_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_360x720cru_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_360x720cru_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_1x1_brazil_SSP2-4.5_1850-2100_78pfts_c240221.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_1x1_brazil_SSP2-4.5_1850-2100_78pfts_c240912.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_mpasa120_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_mpasa120_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne3np4.pg3_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne3np4.pg3_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne16np4.pg3_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne16np4.pg3_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne30np4.pg3_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne30np4.pg3_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_C96_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_C96_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_1x1_smallvilleIA_SSP2-4.5_1850-1855_78pfts_c240221.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/synthetic/landuse.timeseries_1x1_smallvilleIA_synth_1850-1855_78pfts_c240908.nc
@@ -2097,57 +2075,57 @@ lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_TOOL_hist_78pfts_CMIP6_simyr2000_c
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP1-2.6_1850-2100_78pfts_c240216.nc
+ >lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_0.9x1.25_SSP1-2.6_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240216.nc
+ >lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_1.9x2.5_SSP2-4.5_1850-2100_78pfts_c240216.nc
+ >lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_1.9x2.5_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_4x5_SSP2-4.5_1850-2100_78pfts_c240216.nc
+ >lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_4x5_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_10x15_SSP2-4.5_1850-2100_78pfts_c240216.nc
+ >lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_10x15_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_360x720cru_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_360x720cru_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_1x1_brazil_SSP2-4.5_1850-2100_78pfts_c240221.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_1x1_brazil_SSP2-4.5_1850-2100_78pfts_c240912.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_mpasa120_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_mpasa120_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne3np4.pg3_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne3np4.pg3_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne16np4.pg3_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne16np4.pg3_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne30np4.pg3_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne30np4.pg3_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_C96_SSP2-4.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_C96_SSP2-4.5_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4.ARCTICGRIS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240425.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne0np4.ARCTICGRIS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4.ARCTIC.ne30x4_SSP2-4.5_1979-2026_78pfts_c240425.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne0np4.ARCTIC.ne30x4_SSP2-4.5_1979-2026_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4CONUS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240425.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne0np4CONUS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP3-7.0_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_0.9x1.25_SSP3-7.0_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP4-6.0_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_0.9x1.25_SSP4-6.0_1850-2100_78pfts_c240908.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP5-8.5_1850-2100_78pfts_c240216.nc
+>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_0.9x1.25_SSP5-8.5_1850-2100_78pfts_c240908.nc
diff --git a/bld/namelist_files/namelist_defaults_fire_emis.xml b/bld/namelist_files/namelist_defaults_fire_emis.xml
index 54e67424d5..ad74eafd16 100644
--- a/bld/namelist_files/namelist_defaults_fire_emis.xml
+++ b/bld/namelist_files/namelist_defaults_fire_emis.xml
@@ -17,6 +17,6 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
'bc_a1 = BC','pom_a1 = 1.4*OC','SO2 = SO2'
-lnd/clm2/firedata/fire_emis_factors_c140116.nc
+lnd/clm2/firedata/fire_emission_factors_78PFTs_c20240624.nc
diff --git a/bld/namelist_files/namelist_definition_ctsm.xml b/bld/namelist_files/namelist_definition_ctsm.xml
index d2347121c1..417444914e 100644
--- a/bld/namelist_files/namelist_definition_ctsm.xml
+++ b/bld/namelist_files/namelist_definition_ctsm.xml
@@ -577,7 +577,7 @@ Only works when running with a non-stub glacier model.
+ valid_values="multiple,virtual,single_at_atm_topo,UNSET" >
Behavior of each glacier region (GLACIER_REGION in surface dataset).
First item corresponds to GLACIER_REGION with ID 0 in the surface dataset,
second to GLACIER_REGION with ID 1, etc.
@@ -590,12 +590,21 @@ Allowed values are:
'single_at_atm_topo': glacier landunits in these grid cells have a single column,
whose elevation matches the atmosphere's topographic height (so that there is no
adjustment due to downscaling)
-Behavior of 'virtual' is required in the region where we have an ice sheet model
+'UNSET': place-holder for non-existent regions
+Most (if not all) of the region where there is an ice sheet model should have a behavior
+of 'virtual': This behavior is needed to compute surface mass balance (SMB) in all
+elevation classes for the sake of vertical downscaling, and is needed to allow two-way
+feedbacks of glacier areas. You are allowed to have gridcells with non-virtual behavior in
+this domain, but this should be minimized: SMB cannot be computed there, and CLM subgrid
+areas will not remain in sync with the GLC model. (Within the icemask - i.e., the active
+glc domain - you are NOT allowed to have gridcells with non-virtual behavior that also
+have glacier_region_melt_behavior='replaced_by_ice': within the icemask, you're only
+allowed to have non-virtual behavior in places where you are not computing SMB).
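+For example, a user_nl_clm sketch for a surface dataset with four glacier regions
+(the same settings exercised by the LVG system test elsewhere in this changeset;
+the region with ID 1 is the UNSET place-holder):
+  glacier_region_behavior = 'single_at_atm_topo', 'UNSET', 'virtual', 'multiple'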
+ valid_values="replaced_by_ice,remains_in_place,UNSET" >
Treatment of ice melt for each glacier region (GLACIER_REGION in surface dataset).
First item corresponds to GLACIER_REGION with ID 0 in the surface dataset,
second to GLACIER_REGION with ID 1, etc.
@@ -604,17 +613,18 @@ Allowed values are:
this results in positive liquid runoff and negative ice runoff
'remains_in_place': any melted ice remains in place as liquid until it refreezes;
thus, ice melt does not result in any runoff
-IMPORTANT NOTE: Regions with the 'remains_in_place' behavior also do not
-compute SMB (because negative SMB would be pretty much meaningless in
-those regions). Thus, you cannot use this behavior where GLC is
-operating.
-Regions with the 'replaced_by_ice' behavior also compute SMB for the
-vegetated column.
+'UNSET': place-holder for non-existent regions
+IMPORTANT NOTE: Regions with the 'remains_in_place' behavior also do not compute SMB
+(because negative SMB would be pretty much meaningless in those regions). Thus, most (if
+not all) of the region where there is an ice sheet model should have the 'replaced_by_ice'
+behavior; the SMB sent to the GLC model will be 0 in any gridcells with the
+'remains_in_place' behavior.
+Regions with the 'replaced_by_ice' behavior also compute SMB for the vegetated column.
+ valid_values="remains_ice,melted,UNSET" >
Treatment of ice runoff for each glacier region (GLACIER_REGION in surface dataset).
First item corresponds to GLACIER_REGION with ID 0 in the surface dataset,
second to GLACIER_REGION with ID 1, etc.
@@ -625,7 +635,13 @@ Allowed values are:
'melted': ice runoff generated by the CLM physics (primarily due to snow capping) is melted
(generating a negative sensible heat flux) and runs off as liquid; this is appropriate in
regions that have little iceberg calving in reality. This can be important to avoid unrealistic
- cooling of the ocean and consequent runaway sea ice growth.
+ cooling of the ocean and consequent runaway sea ice growth. This option cannot be
+ combined with glacier_region_melt_behavior='replaced_by_ice': While there is nothing
+ fundamentally wrong with this combination, it can result in problematic, non-physical
+ fluxes (particularly, a large positive sensible heat flux during glacial melt in
+ regions where the ice sheet is not fully dynamic and two-way-coupled; see
+ https://github.com/ESCOMP/ctsm/issues/423 for details).
+'UNSET': place-holder for non-existent regions
Only applies when melt_non_icesheet_ice_runoff is .true.
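+For example, a hypothetical user_nl_clm pairing of these two behaviors (illustrative
+only, not a released default; 'melted' is paired with 'remains_in_place' here because
+it cannot be combined with 'replaced_by_ice', as described above):
+  glacier_region_melt_behavior       = 'replaced_by_ice', 'UNSET', 'replaced_by_ice', 'remains_in_place'
+  glacier_region_ice_runoff_behavior = 'remains_ice', 'UNSET', 'remains_ice', 'melted'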
diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl
index 8b2827b069..06c9bcc361 100755
--- a/bld/unit_testers/build-namelist_test.pl
+++ b/bld/unit_testers/build-namelist_test.pl
@@ -42,7 +42,7 @@ sub make_env_run {
my %settings = @_;
# Set default settings
- my %env_vars = ( DIN_LOC_ROOT=>"MYDINLOCROOT", GLC_TWO_WAY_COUPLING=>"FALSE", LND_SETS_DUST_EMIS_DRV_FLDS=>"TRUE", NEONSITE=>"" );
+ my %env_vars = ( DIN_LOC_ROOT=>"MYDINLOCROOT", GLC_TWO_WAY_COUPLING=>"FALSE", LND_SETS_DUST_EMIS_DRV_FLDS=>"TRUE", NEONSITE=>"", PLUMBER2SITE=>"" );
# Set any settings that came in from function call
foreach my $item ( keys(%settings) ) {
$env_vars{$item} = $settings{$item};
@@ -163,10 +163,10 @@ sub cat_and_create_namelistinfile {
#
# Figure out number of tests that will run
#
-my $ntests = 3349;
+my $ntests = 3997;
if ( defined($opts{'compare'}) ) {
- $ntests += 2007;
+ $ntests += 2437;
}
plan( tests=>$ntests );
@@ -393,9 +393,67 @@ sub cat_and_create_namelistinfile {
my $namelistfile = "temp.namelistinfile_$site";
&cat_and_create_namelistinfile( $neondefaultfile, $neonsitefile, $namelistfile );
#
+ # Now run the site for both bgc and fates
+ #
+ foreach my $bgc ( "bgc", "fates") {
+ if ( ($bgc eq "bgc") or ($site ne "STER" and $site ne "KONA")) {
+ my $options = "--res CLM_USRDAT --clm_usr_name NEON --no-megan --bgc $bgc --use_case 2018_control --infile $namelistfile";
+ eval{ system( "$bldnml -envxml_dir . $options > $tempfile 2>&1 " ); };
+ is( $@, '', "options: $options" );
+ $cfiles->checkfilesexist( "$options", $mode );
+ $cfiles->shownmldiff( "default", $mode );
+ if ( defined($opts{'compare'}) ) {
+ $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode );
+ $cfiles->dodiffonfile( "lnd_in", "$options", $mode );
+ $cfiles->comparefiles( "$options", $mode, $opts{'compare'} );
+ }
+ if ( defined($opts{'generate'}) ) {
+ $cfiles->copyfiles( "$options", $mode );
+ }
+ }
+ }
+ system( "/bin/rm $namelistfile" );
+ &cleanup();
+}
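+# As a concrete illustration (not taken from the script's output), the bgc pass
+# above for site STER expands to roughly:
+#   $bldnml -envxml_dir . --res CLM_USRDAT --clm_usr_name NEON --no-megan \
+#           --bgc bgc --use_case 2018_control --infile temp.namelistinfile_STER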
+print "\n===============================================================================\n";
+print "Test the PLUMBER2 sites\n";
+print "=================================================================================\n";
+my $phys = "clm6_0";
+$mode = "-phys $phys";
+&make_config_cache($phys);
+my $plumdir = "../../cime_config/usermods_dirs/PLUMBER2";
+foreach my $site (
+ "AR-SLu", "AU-Emr", "AU-TTE", "CA-NS1", "CA-SF3", "CN-HaM", "DE-Obe", "ES-ES1", "FR-Gri", "IE-Dri", "IT-LMa", "IT-SRo", "RU-Fyo", "US-Aud", "US-Ho1", "US-Ne2", "US-Syv", "ZM-Mon",
+ "AT-Neu", "AU-Gin", "AU-Tum", "CA-NS2", "CH-Cha", "CN-Qia", "DE-Seh", "ES-ES2", "FR-Hes", "IT-Amp", "IT-Mal", "JP-SMF", "RU-Zot", "US-Bar", "US-KS2", "US-Ne3", "US-Ton",
+ "AU-ASM", "AU-GWW", "AU-Whr", "CA-NS4", "CH-Dav", "CZ-wet", "DE-SfN", "ES-LgS", "FR-LBr", "IT-BCi", "IT-MBo", "NL-Ca1", "SD-Dem", "US-Bkg", "US-Los", "US-NR1", "US-Tw4",
+ "AU-Cow", "AU-How", "AU-Wrr", "CA-NS5", "CH-Fru", "DE-Bay", "DE-Tha", "ES-LMa", "FR-Lq1", "IT-CA1", "IT-Noe", "NL-Hor", "SE-Deg", "US-Blo", "US-Me2", "US-PFa", "US-Twt",
+ "AU-Cpr", "AU-Lit", "AU-Ync", "CA-NS6", "CH-Oe1", "DE-Wet", "ES-VDA", "FR-Lq2", "IT-CA2", "IT-Non", "NL-Loo", "UK-Gri", "US-Bo1", "US-Me4", "US-Prr", "US-UMB",
+ "AU-Ctr", "AU-Otw", "BE-Bra", "CA-NS7", "CN-Cha", "DE-Geb", "DK-Fou", "FI-Hyy", "FR-Pue", "IT-CA3", "IT-PT1", "PL-wet", "UK-Ham", "US-Cop", "US-Me6", "US-SP1", "US-Var",
+ "AU-Cum", "AU-Rig", "BE-Lon", "CA-Qcu", "CN-Cng", "DE-Gri", "DK-Lva", "FI-Kaa", "GF-Guy", "IT-Col", "IT-Ren", "PT-Esp", "UK-PL3", "US-FPe", "US-MMS", "US-SP2", "US-WCr",
+ "AU-DaP", "AU-Rob", "BE-Vie", "CA-Qfo", "CN-Dan", "DE-Hai", "DK-Ris", "FI-Lom", "HU-Bug", "IT-Cpz", "IT-Ro1", "PT-Mi1", "US-AR1", "US-GLE", "US-MOz", "US-SP3", "US-Whs",
+ "AU-DaS", "AU-Sam", "BR-Sa3", "CA-SF1", "CN-Din", "DE-Kli", "DK-Sor", "FI-Sod", "ID-Pag", "IT-Isp", "IT-Ro2", "PT-Mi2", "US-AR2", "US-Goo", "US-Myb", "US-SRG", "US-Wkg",
+ "AU-Dry", "AU-Stp", "BW-Ma1", "CA-SF2", "CN-Du2", "DE-Meh", "DK-ZaH", "FR-Fon", "IE-Ca1", "IT-Lav", "IT-SR2", "RU-Che", "US-ARM", "US-Ha1", "US-Ne1", "US-SRM", "ZA-Kru"
+ ) {
+ &make_env_run( PLUMBER2SITE=>"$site" );
+ #
+ # Concatenate the default usermods and the site-specific file together, expanding env variables while doing so
+ #
+ if ( ! -d "$plumdir/$site" ) {
+ print "PLUMBER2 directory is not there: $plumdir/$site\n";
+ die "ERROR:: PLUMBER2 site does not exist: $site\n";
+ }
+ my $plumdefaultfile = "$plumdir/defaults/user_nl_clm";
+ my $plumsitefile = "$plumdir/$site/user_nl_clm";
+ if ( ! -f $plumsitefile ) {
+ $plumsitefile = undef;
+ }
+ $ENV{'PLUMBER2'} = $site;
+ my $namelistfile = "temp.namelistinfile_$site";
+ &cat_and_create_namelistinfile( $plumdefaultfile, $plumsitefile, $namelistfile );
+ #
# Now run the site
#
- my $options = "--res CLM_USRDAT --clm_usr_name NEON --no-megan --bgc bgc --use_case 2018_control --infile $namelistfile";
+ my $options = "--res CLM_USRDAT --clm_usr_name PLUMBER2 --no-megan --bgc sp --infile $namelistfile";
eval{ system( "$bldnml -envxml_dir . $options > $tempfile 2>&1 " ); };
is( $@, '', "options: $options" );
$cfiles->checkfilesexist( "$options", $mode );
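+# As a concrete illustration (not taken from the script's output), the first
+# site in the list above runs roughly:
+#   $bldnml -envxml_dir . --res CLM_USRDAT --clm_usr_name PLUMBER2 --no-megan \
+#           --bgc sp --infile temp.namelistinfile_AR-SLu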
@@ -1301,6 +1359,10 @@ sub cat_and_create_namelistinfile {
namelst=>"fsurdat='build-namelist_test.pl'",
phys=>"clm6_0",
},
+ "hillslope with init_interp"=>{ options=>"-bgc bgc -envxml_dir .",
+ namelst=>"use_init_interp=.true.,use_hillslope=.true.",
+ phys=>"clm6_0",
+ },
);
foreach my $key ( keys(%warntest) ) {
print( "$key\n" );
diff --git a/cime_config/SystemTests/lvg.py b/cime_config/SystemTests/lvg.py
index 4b990313f5..d73f4e707b 100644
--- a/cime_config/SystemTests/lvg.py
+++ b/cime_config/SystemTests/lvg.py
@@ -32,12 +32,12 @@ def _case_one_setup(self):
append_to_user_nl_files(
caseroot=self._get_caseroot(),
component="clm",
- contents="glacier_region_behavior = 'single_at_atm_topo', 'virtual', 'virtual', 'multiple'",
+ contents="glacier_region_behavior = 'single_at_atm_topo', 'UNSET', 'virtual', 'multiple'",
)
def _case_two_setup(self):
append_to_user_nl_files(
caseroot=self._get_caseroot(),
component="clm",
- contents="glacier_region_behavior = 'single_at_atm_topo', 'virtual', 'virtual', 'virtual'",
+ contents="glacier_region_behavior = 'single_at_atm_topo', 'UNSET', 'virtual', 'virtual'",
)
diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml
index e18b15a26e..30b19ce862 100644
--- a/cime_config/testdefs/ExpectedTestFails.xml
+++ b/cime_config/testdefs/ExpectedTestFails.xml
@@ -29,7 +29,7 @@
-
+ FAIL
+ CDEPS/#243
@@ -37,6 +37,50 @@
+
+
+ FAIL
+ #2787
+ The issue shows how to fix it.
+
+
+
+
+ FAIL
+ #2787
+ The issue shows how to fix it.
+
+
+
+
+ FAIL
+ #2787
+ The issue shows how to fix it.
+
+
+
+
+
+ FAIL
+ #2780
+ Crashes in the matrix solver.
+
+
+
+
+ FAIL
+ #2780
+ Crashes in the matrix solver.
+
+
+
+
+ FAIL
+ #2780
+ Crashes in the matrix solver.
+
+
+
FAIL
@@ -51,6 +95,13 @@
This failure relates to the preceding ERP failure.
+
+
+ FAIL
+ #2619
+ This failure relates to the preceding ERP failure.
+
+
@@ -144,7 +195,7 @@
-
+ FAIL
+ #2653
diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml
index 425fdaaf34..0eb3d5012d 100644
--- a/cime_config/testdefs/testlist_clm.xml
+++ b/cime_config/testdefs/testlist_clm.xml
@@ -31,7 +31,7 @@
-
+
@@ -40,6 +40,7 @@
+
@@ -90,7 +91,7 @@
-
+
@@ -98,6 +99,7 @@
+
@@ -193,80 +195,76 @@
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
-
@@ -349,22 +347,24 @@
-
+
+
-
+
+
@@ -623,112 +623,124 @@
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
-
+
+
@@ -914,29 +926,14 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
@@ -1263,22 +1260,6 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
@@ -1578,13 +1559,14 @@
-
+
+
@@ -2024,33 +2006,36 @@
-
+
+
-
+
+
-
+
+
-
+
@@ -2059,17 +2044,18 @@
-
+
+
-
+
@@ -2078,7 +2064,7 @@
-
+
@@ -2088,7 +2074,7 @@
-
+
@@ -2097,7 +2083,7 @@
-
+
@@ -2106,13 +2092,14 @@
-
+
+
@@ -2133,13 +2120,14 @@
-
+
+
@@ -2417,15 +2405,6 @@
-
-
-
-
-
-
-
-
-
@@ -2551,7 +2530,7 @@
-
+
@@ -2559,6 +2538,7 @@
+
@@ -3603,7 +3583,6 @@
-
diff --git a/cime_config/testdefs/testmods_dirs/clm/ExcessIceStreams/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/ExcessIceStreams/include_user_mods
index fe0e18cf88..1e4ddf5337 100644
--- a/cime_config/testdefs/testmods_dirs/clm/ExcessIceStreams/include_user_mods
+++ b/cime_config/testdefs/testmods_dirs/clm/ExcessIceStreams/include_user_mods
@@ -1 +1,2 @@
../default
+../nofireemis
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/shell_commands b/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/shell_commands
new file mode 100644
index 0000000000..c97c0dfea7
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/shell_commands
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+./xmlchange CLM_BLDNML_OPTS="-clm_demand flanduse_timeseries" --append
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/user_nl_clm
index b27a74031c..f718010b07 100644
--- a/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/user_nl_clm
@@ -1,2 +1 @@
-flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_4x5_hist_16_CMIP6_1850-2015_c230620.nc'
fates_harvest_mode = 'landuse_timeseries'
diff --git a/cime_config/testdefs/testmods_dirs/clm/Hillslope/shell_commands b/cime_config/testdefs/testmods_dirs/clm/Hillslope/shell_commands
index 6f3602d2e6..a2759b51b4 100644
--- a/cime_config/testdefs/testmods_dirs/clm/Hillslope/shell_commands
+++ b/cime_config/testdefs/testmods_dirs/clm/Hillslope/shell_commands
@@ -2,3 +2,6 @@
DIN_LOC_ROOT=$(./xmlquery --value DIN_LOC_ROOT)
meshfile=$DIN_LOC_ROOT/lnd/clm2/testdata/ESMFmesh_10x15_synthetic_cosphill_1.0.nc
./xmlchange ATM_DOMAIN_MESH=${meshfile},LND_DOMAIN_MESH=${meshfile}
+
+# -ignore_warnings is needed as long as we don't allow use_hillslope and use_init_interp together
+./xmlchange --append CLM_BLDNML_OPTS=-ignore_warnings
diff --git a/cime_config/testdefs/testmods_dirs/clm/Hillslope/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/Hillslope/user_nl_clm
index 4fc6fc2373..d27565d98f 100644
--- a/cime_config/testdefs/testmods_dirs/clm/Hillslope/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/Hillslope/user_nl_clm
@@ -6,6 +6,6 @@ hillslope_transmissivity_method = 'LayerSum'
hillslope_pft_distribution_method = 'PftLowlandUpland'
hillslope_soil_profile_method = 'Uniform'
-fsurdat = '$DIN_LOC_ROOT/lnd/clm2/testdata/surfdata_10x15_hist_2000_78pfts_c240216.synthetic_hillslopes.nc'
+fsurdat = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.3.0/synthetic/surfdata_10x15_hist_2000_78pfts_c240905.synthetic_hillslopes3.nc'
use_ssre = .false.
diff --git a/cime_config/testdefs/testmods_dirs/clm/SNICARFRC/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/SNICARFRC/include_user_mods
index fe0e18cf88..1e4ddf5337 100644
--- a/cime_config/testdefs/testmods_dirs/clm/SNICARFRC/include_user_mods
+++ b/cime_config/testdefs/testmods_dirs/clm/SNICARFRC/include_user_mods
@@ -1 +1,2 @@
../default
+../nofireemis
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60_monthly_matrixcn_soilCN30/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/clm60_monthly_matrixcn_soilCN30/user_nl_clm
index 4bfc0520f8..d51360c82b 100644
--- a/cime_config/testdefs/testmods_dirs/clm/clm60_monthly_matrixcn_soilCN30/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/clm60_monthly_matrixcn_soilCN30/user_nl_clm
@@ -1,2 +1,2 @@
use_soil_matrixcn = .true.
-paramfile = '$DIN_LOC_ROOT/lnd/clm2/paramdata/ctsm60_params_cn30.c240814.nc'
+paramfile = '$DIN_LOC_ROOT/lnd/clm2/paramdata/ctsm60_params_cn30.c240822.nc'
diff --git a/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/README b/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/README
index dbd0696317..f19dd1893e 100644
--- a/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/README
+++ b/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/README
@@ -4,22 +4,6 @@ new combination do_transient_crops = .true. and use_crop = .false. while
exercising the collapse2gencrop branch ability to collapse the full crop data
to clm's generic crops.
-According to the file
-bld/namelist_files/namelist_defaults_ctsm.xml
-the following two files used in this test
-are default files for the following options:
-
-fsurdat = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_10x15_hist_1850_78pfts_c240216.nc'
-hgrid="10x15" sim_year="1850" use_crop=".false." irrigate=".true."
-hgrid="10x15" sim_year="1850" use_crop=".true."
-
-flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_10x15_SSP2-4.5_1850-2100_78pfts_c240216.nc'
--hgrid="10x15" sim_year_range="1850-2000" use_crop=".true."
--hgrid="10x15" rcp="8.5" sim_year_range="1850-2100" use_crop=".true."
--hgrid="10x15" rcp="6" sim_year_range="1850-2100" use_crop=".true."
--hgrid="10x15" rcp="4.5" sim_year_range="1850-2100" use_crop=".true."
--hgrid="10x15" rcp="2.6" sim_year_range="1850-2100" use_crop=".true."
-
This test includes the settings of the decStart test so as to also test the
end-of-year transition since it's an IHist case and transient vegetation gets
updated every new year.
diff --git a/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/user_nl_clm
deleted file mode 100644
index d7be01280b..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/user_nl_clm
+++ /dev/null
@@ -1,2 +0,0 @@
-fsurdat = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_10x15_hist_1850_78pfts_c240216.nc'
-flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_10x15_SSP2-4.5_1850-2100_78pfts_c240216.nc'
diff --git a/cime_config/testdefs/testmods_dirs/clm/f09_dec1990Start_GU_LULCC/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/f09_dec1990Start_GU_LULCC/user_nl_clm
index 0dbc0b4942..e9040ca841 100644
--- a/cime_config/testdefs/testmods_dirs/clm/f09_dec1990Start_GU_LULCC/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/f09_dec1990Start_GU_LULCC/user_nl_clm
@@ -1,5 +1,2 @@
- ! Specify a dataset that has non-zero Gross Unrepresented Land Use change fields on it
- ! And turn it on
- flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240216.nc'
- fsurdat = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_0.9x1.25_hist_1850_78pfts_c240216.nc'
+ ! Turn on Gross Unrepresented Land Use
do_grossunrep = .true.
diff --git a/cime_config/testdefs/testmods_dirs/clm/nofire/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/nofire/include_user_mods
new file mode 100644
index 0000000000..dbad2e75e6
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/nofire/include_user_mods
@@ -0,0 +1 @@
+../nofireemis
diff --git a/cime_config/testdefs/testmods_dirs/clm/nofire/shell_commands b/cime_config/testdefs/testmods_dirs/clm/nofire/shell_commands
index fe06fd1042..bbe1216f7d 100644
--- a/cime_config/testdefs/testmods_dirs/clm/nofire/shell_commands
+++ b/cime_config/testdefs/testmods_dirs/clm/nofire/shell_commands
@@ -1,5 +1,4 @@
-./xmlchange CLM_BLDNML_OPTS="-no-fire_emis" --append
./xmlchange BFBFLAG="TRUE"
diff --git a/cime_config/testdefs/testmods_dirs/clm/nofireemis/shell_commands b/cime_config/testdefs/testmods_dirs/clm/nofireemis/shell_commands
new file mode 100644
index 0000000000..2cef76cc58
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/nofireemis/shell_commands
@@ -0,0 +1,3 @@
+./xmlchange CLM_BLDNML_OPTS="-no-fire_emis" --append
+
+
diff --git a/cime_config/testdefs/testmods_dirs/clm/o3lombardozzi2015/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/o3lombardozzi2015/include_user_mods
index fe0e18cf88..1e4ddf5337 100644
--- a/cime_config/testdefs/testmods_dirs/clm/o3lombardozzi2015/include_user_mods
+++ b/cime_config/testdefs/testmods_dirs/clm/o3lombardozzi2015/include_user_mods
@@ -1 +1,2 @@
../default
+../nofireemis
diff --git a/cime_config/testdefs/testmods_dirs/clm/pauseResume/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/pauseResume/include_user_mods
index fe0e18cf88..1e4ddf5337 100644
--- a/cime_config/testdefs/testmods_dirs/clm/pauseResume/include_user_mods
+++ b/cime_config/testdefs/testmods_dirs/clm/pauseResume/include_user_mods
@@ -1 +1,2 @@
../default
+../nofireemis
diff --git a/cime_config/testdefs/testmods_dirs/clm/prescribed/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/prescribed/include_user_mods
index fe0e18cf88..1e4ddf5337 100644
--- a/cime_config/testdefs/testmods_dirs/clm/prescribed/include_user_mods
+++ b/cime_config/testdefs/testmods_dirs/clm/prescribed/include_user_mods
@@ -1 +1,2 @@
../default
+../nofireemis
diff --git a/cime_config/testdefs/testmods_dirs/clm/pts/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/pts/include_user_mods
index fe0e18cf88..1e4ddf5337 100644
--- a/cime_config/testdefs/testmods_dirs/clm/pts/include_user_mods
+++ b/cime_config/testdefs/testmods_dirs/clm/pts/include_user_mods
@@ -1 +1,2 @@
../default
+../nofireemis
diff --git a/cime_config/testdefs/testmods_dirs/clm/smallville_dynlakes_monthly/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/smallville_dynlakes_monthly/user_nl_clm
index c86418dabd..6223cc203f 100644
--- a/cime_config/testdefs/testmods_dirs/clm/smallville_dynlakes_monthly/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/smallville_dynlakes_monthly/user_nl_clm
@@ -6,7 +6,7 @@ do_transient_lakes = .true.
! Key points are that lake area starts as 0, increases after the first year, then decreases after the second year.
! PCT_CROP is also changed so that PCT_LAKE + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_LAKE in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.)
! Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid.
-flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_1x1_smallvilleIA_SSP2-4.5_1850-1855_78pfts_dynLakes_c240221.nc'
+flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.3.0/synthetic/landuse.timeseries_1x1_smallvilleIA_synth_SSP2-4.5_1850-1855_78pfts_dynLakes_c240912.nc'
! NOTE slevis (2024/2/23) Adding option for tests to pass. In the long term
! ensure that subset_data generates fsurdat and landuse files consistent with
diff --git a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm
index a5bdb76ac3..958265cffc 100644
--- a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm
@@ -7,7 +7,7 @@ do_transient_urban = .true.
! Medium density urban is set to zero to test the memory-saving behavior of PCT_URBAN_MAX.
! PCT_CROP is also changed so that PCT_URBAN + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_URBAN in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.)
! Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid.
-flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_1x1_smallvilleIA_SSP2-4.5_1850-1855_78pfts_dynUrban_c240221.nc'
+flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.3.0/synthetic/landuse.timeseries_1x1_smallvilleIA_synth_SSP2-4.5_1850-1855_78pfts_dynUrban_c240912.nc'
! NOTE slevis (2024/2/23) Adding option for tests to pass. In the long term
! ensure that subset_data generates fsurdat and landuse files consistent with
diff --git a/cime_config/testdefs/testmods_dirs/clm/waccmx_offline/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/waccmx_offline/include_user_mods
index fe0e18cf88..1e4ddf5337 100644
--- a/cime_config/testdefs/testmods_dirs/clm/waccmx_offline/include_user_mods
+++ b/cime_config/testdefs/testmods_dirs/clm/waccmx_offline/include_user_mods
@@ -1 +1,2 @@
../default
+../nofireemis
diff --git a/cime_config/usermods_dirs/NEON/FATES/defaults/user_nl_clm b/cime_config/usermods_dirs/NEON/FATES/defaults/user_nl_clm
index 1a9847a69b..e49d110d51 100644
--- a/cime_config/usermods_dirs/NEON/FATES/defaults/user_nl_clm
+++ b/cime_config/usermods_dirs/NEON/FATES/defaults/user_nl_clm
@@ -18,9 +18,6 @@
! Set glc_do_dynglacier with GLC_TWO_WAY_COUPLING env variable
!----------------------------------------------------------------------------------
-flanduse_timeseries = ' ' ! This isn't needed for a non transient case, but will be once we start using transient compsets
-fsurdat = "$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/NEON/16PFT_mixed/surfdata_1x1_NEON_${NEONSITE}_hist_2000_16pfts_c240206.nc"
-
! h1 output stream
hist_fincl2 = 'FATES_AUTORESP','FCEV','FCTR','FGEV','FIRA','FSA','FSH','FATES_GPP','FATES_GPP_PF','H2OSOI',
'SNOW_DEPTH','TBOT','TSOI','SOILC_vr','FATES_NPP','FATES_NPP_PF','FATES_VEGC','FATES_VEGC_PF'
diff --git a/cime_config/usermods_dirs/NEON/defaults/user_nl_clm b/cime_config/usermods_dirs/NEON/defaults/user_nl_clm
index b73da1f33e..f0e7142990 100644
--- a/cime_config/usermods_dirs/NEON/defaults/user_nl_clm
+++ b/cime_config/usermods_dirs/NEON/defaults/user_nl_clm
@@ -18,9 +18,6 @@
! Set glc_do_dynglacier with GLC_TWO_WAY_COUPLING env variable
!----------------------------------------------------------------------------------
-flanduse_timeseries = ' ' ! This isn't needed for a non transient case, but will be once we start using transient compsets
-fsurdat = "$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_${NEONSITE}_hist_2000_78pfts_c240206.nc"
-
! h1 output stream
hist_fincl2 = 'AR','ELAI','FCEV','FCTR','FGEV','FIRA','FSA','FSH','GPP','H2OSOI',
'HR','SNOW_DEPTH','TBOT','TSOI','SOILC_vr','FV','NET_NMIN_vr'
diff --git a/cime_config/usermods_dirs/PLUMBER2/defaults/user_nl_clm b/cime_config/usermods_dirs/PLUMBER2/defaults/user_nl_clm
index fe23065798..5216afb381 100644
--- a/cime_config/usermods_dirs/PLUMBER2/defaults/user_nl_clm
+++ b/cime_config/usermods_dirs/PLUMBER2/defaults/user_nl_clm
@@ -19,6 +19,6 @@
!----------------------------------------------------------------------------------
flanduse_timeseries = ' ' ! This isn't needed for a non transient case, but will be once we start using transient compsets
-fsurdat = "$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/PLUMBER2/surfdata_1x1_PLUMBER2_${PLUMBER2SITE}_hist_2000_16pfts_c240326.nc"
+fsurdat = "$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/PLUMBER2/ctsm5.3.0/surfdata_1x1_PLUMBER2_${PLUMBER2SITE}_hist_2000_16pfts_c240912.nc"
! custom namelist changes for each site / case
diff --git a/cime_config/usermods_dirs/_includes/cmip6_glaciers_virtual_antarctica/user_nl_clm b/cime_config/usermods_dirs/_includes/cmip6_glaciers_virtual_antarctica/user_nl_clm
index 3486d7abfb..9aeba5c9d3 100644
--- a/cime_config/usermods_dirs/_includes/cmip6_glaciers_virtual_antarctica/user_nl_clm
+++ b/cime_config/usermods_dirs/_includes/cmip6_glaciers_virtual_antarctica/user_nl_clm
@@ -5,5 +5,5 @@
! This differs from the default in that it turns on virtual columns over Antarctica
! This is desired so that we have the output needed to drive a later offline CISM Antarctica simulation
! However, this increases the cost of CLM by about 10%
-glacier_region_behavior = 'single_at_atm_topo', 'virtual', 'virtual', 'virtual'
+glacier_region_behavior = 'single_at_atm_topo', 'UNSET', 'virtual', 'virtual'
diff --git a/doc/ChangeLog b/doc/ChangeLog
index e553a59730..ded12c54f8 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,4 +1,459 @@
===============================================================
+Tag name: ctsm5.3.001
+Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310)
+Date: Thu 26 Sep 2024 03:10:40 PM MDT
+One-line Summary: Merge b4b-dev
+
+Purpose and description of changes
+----------------------------------
+
+- Keith fixed comments on urban thermal variables.
+- Sam R removed references to PTCLM*.
+- Sam R made updates to the documentation.
+- Sam R made improvements to mesh_plotter.
+- Matvey and Sam R worked on hillslope hydrology warnings.
+- Matvey added a test and a warning.
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+PR #2770 documentation fixes and improvements
+PR #2772 mesh_plotter improvements
+PR #2765 fix comments on urban thermal variables
+PR #2703 add a namelist warning
+
+Notes of particular relevance for users
+---------------------------------------
+Changes to documentation:
+ Yes, see "bugs fixed" above.
+
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+ Test(s) added to bld/unit_testers/build-namelist_test.pl
+
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+
+Answer changes
+--------------
+Changes answers relative to baseline: No
+
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/2792
+
+===============================================================
+===============================================================
+Tag name: ctsm5.3.0
+Originator(s): multiple (see below)
+Date: Wed 25 Sep 2024 01:22:06 AM MDT
+One-line Summary: Update surface datasets, CN Matrix, CLM60: excess ice on, explicit A/C on, crop calendars, Sturm snow, Leung dust emissions, prigent roughness data
+
+Purpose and description of changes since ctsm5.2.005
+----------------------------------------------------
+
+Bring in updates needed for the CESM3.0 science capability/functionality "chill". Most importantly, this brings
+in: CN Matrix to speed up spinup for the BGC model, updated surface datasets, updated Leung 2023 dust emissions,
+explicit Air Conditioning for the Urban model, and updates to crop calendars. For clm6_0 physics these options are
+now turned on by default, in addition to Sturm snow and excess ice.
+
+Changes to CTSM Infrastructure:
+===============================
+
+ - manage_externals removed and replaced by git-fleximod
+ - Ability to handle CAM7 in LND_TUNING_MODE
+
+Changes to CTSM Answers:
+========================
+
+ Changes to defaults for clm6_0 physics:
+ - Urban explicit A/C turned on
+ - Snow thermal conductivity is now Sturm_1997
+ - New IC file for f09 1850
+ - New crop calendars
+ - Dust emissions is now Leung_2023
+ - Excess ice is turned on
+ - Updates to MEGAN for BVOCs
+ - Updates to BGC fire method
+
+ Changes for all physics versions:
+
+ - Parameter files updated
+ - FATES parameter file updated
+ - Glacier region 1 is now undefined
+ - Update in FATES transient Land use
+ - Pass active glacier (CISM) runoff directly to river model (MOSART)
+ - Add the option for using matrix for Carbon/Nitrogen BGC spinup
+
+New surface datasets:
+=====================
+
+- With new surface datasets the following GLC fields have region "1" set to UNSET:
+ glacier_region_behavior, glacier_region_melt_behavior, glacier_region_ice_runoff_behavior
+- Updates to allow creating transient landuse timeseries files going back to 1700.
+- Fix an important bug on soil fields that was there since ctsm5.2.0. This results in mksurfdata_esmf now giving identical answers with a change in number of processors, as it should.
+- Add in creation of ne0np4.POLARCAP.ne30x4 surface datasets.
+- Add version to the surface datasets.
+- Remove the --hires_pft option from mksurfdata_esmf as we don't have the datasets for it.
+- Remove VIC fields from surface datasets.
+
+New input datasets to mksurfdata_esmf:
+======================================
+
+- Updates in PFT/LAI/soil-color raw datasets (now from the TRENDY2024 timeseries that ends in 2023), as well as two fire datasets (AG fire, peatland), and the glacier behavior dataset.
+
+Contributors (alphabetical order by github handle)
+------------
+
+ @adrifoster
+ @billsacks
+ @cathyxinchangli
+ @chrislxj
+ @dmleung
+ @ekluzek
+ @Face2sea
+ @fang-bowen
+ @glemieux
+ @HuiWangWanderInGitHub
+ @jedwards
+ @jenniferholm
+ @jfkok
+ @KateC
+ @keerzhang1
+ @lawrencepj1
+ @lifang0209
+ @linniahawkins
+ @mvdebolskiy
+ @mvertens
+ @olyson
+ @rgknox
+ @samsrabin
+ @slevis-lmwg
+ @TeaganKing
+ @wwieder
+
+Significant changes to scientifically-supported configurations since ctsm5.2.005
+--------------------------------------------------------------------------------
+
+glacier_behavior namelist items changed for clm4_5 and clm5_0 physics
+parameter and surface datasets updated for all physics options
+
+Notes of particular relevance for users for ctsm5.2.005 to ctsm5.3.0
+--------------------------------------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+ With the updated surface datasets all finidat files being used will need to
+ be run with use_init_interp = TRUE
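+ For example, a minimal user_nl_clm sketch (assuming finidat already points at
+ your existing pre-ctsm5.3.0 restart file):
+   use_init_interp = .true.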
+
+Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables):
+ New namelist items or options:
+ fire_method -- new options: li2024gswpfrc,li2024crujra
+
+ new fire options:
+ max_rh30_affecting_fuel
+ defo_fire_precip_thresh_bet
+ defo_fire_precip_thresh_bdt
+ nonborpeat_fire_precip_denom
+ borpeat_fire_soilmoist_denom
+
+ new fates options:
+ fates_harvest_mode
+ use_fates_lupft
+ use_fates_potentialveg
+ flandusepftdat
+
+ new physics options:
+ hillslope_fsat_equals_zero
+ urban_explicit_ac
+ use_prigent_roughness
+ stream_fldfilename_prigentroughness
+ stream_meshfile_prigentroughness
+ excess_ice_coldstart_temp
+ excess_ice_coldstart_depth
+
+ new cropcal options:
+ stream_year_last_cropcal_swindows
+ model_year_align_cropcal_swindows
+ stream_year_first_cropcal_cultivar_gdds
+ stream_year_last_cropcal_cultivar_gdds
+ model_year_align_cropcal_cultivar_gdds
+ stream_fldFileName_gdd20_baseline
+ stream_gdd20_seasons
+ flush_gdd20
+ allow_invalid_gdd20_season_inputs
+ stream_fldFileName_gdd20_season_start
+ stream_fldFileName_gdd20_season_end
+
+ cropcal options with names changed:
+ stream_year_first_cropcal => cropcals_rx
+ stream_year_last_cropcal => cropcals_rx_adapt
+ model_year_align_cropcal => stream_year_first_cropcal_swindows
+
+ new CN matrix options:
+ use_matrixcn
+ use_soil_matrixcn
+ hist_wrt_matrixcn_diag
+ spinup_matrixcn
+ nyr_forcing
+ nyr_sasu
+ iloop_avg
+
+ Namelist items removed:
+ use_fates_logging
+ use_dynroot
+
+ Changes to XML options:
+
+ LND_TUNING_MODE: New forcing options for cam4.0, cam5.0, and cam7.0
+ CLM_BLDNML_OPTS: "-bgc cn" option removed
+ CLM_ACCELERATED_SPINUP: sasu option added
+ LND_SETS_DUST_EMIS_DRV_FLDS: New option to determine if CAM or CTSM set dust emission options
+ PLUMBER2SITE: New option to run for PLUMBER2 tower site locations
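+
+ For example, illustrative xmlchange calls from a case directory (a sketch of
+ how such options are set; values as documented above):
+ ./xmlchange CLM_ACCELERATED_SPINUP=sasu
+ ./xmlchange LND_SETS_DUST_EMIS_DRV_FLDS=TRUE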
+
+Changes made to namelist defaults (e.g., changed parameter values):
+
+Changes to the datasets (e.g., parameter, surface or initial files):
+ parameter files updated
+ surface datasets updated
+ many finidat files updated
+
+Things being deprecated (which will be removed):
+ Running with VIC hydrology being deprecated with testing removed
+ mkprocdata_map was removed
+ test/tools testing framework was removed
+
+Testing summary: release testing (regular ctsm_sci fates mosart rtm mksurfdata_esmf python)
+----------------
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS (1710 namelists differ from ctsm5.2.005)
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+ fates tests: (give name of baseline if different from CTSM tagname, normally fates baselines are fates--)
+ derecho ----- OK
+ izumi ------- OK
+
+ any other testing (give details below):
+
+ mksurfdata_esmf
+ derecho ---- run "make all" to create all datasets (completes in 6 hours)
+ (crop-global-SSP2-4.5-ne30 was longest at 6 hr)
+
+ ctsm_sci
+ derecho ---- OK
+
+ mosart
+ derecho ---- OK
+ izumi ------ PASS
+
+ rtm
+ derecho ---- OK
+
+
+Simulations to go with ctsm5.3.0:
+---------------------------------
+
+ https://github.com/NCAR/LMWG_dev/issues/69
+ https://github.com/NCAR/LMWG_dev/issues/65
+
+Other details
+-------------
+
+List git submodules:
+
+fates = sci.1.78.2_api.36.0.0
+cism = cismwrap_2_2_002
+rtm = rtm1_0_80
+mosart = mosart1.1.02
+mizuRoute = cesm-coupling.n02_v2.1.2
+ccs_config = ccs_config_cesm1.0.0
+cime = cime6.0.246
+cmeps = cmeps0.14.77
+cdeps = cdeps1.0.48
+share = share1.0.19
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.029
+Originator(s): multiple (see contributors below)
+Date: Wed 25 Sep 2024 01:22:06 AM MDT
+One-line Summary: Update surface datasets: double tag of ctsm5.3.0
+
+Purpose and description of changes
+----------------------------------
+
+New surface datasets because of updates in the PFT/LAI/soil-color raw datasets, as well as two fire datasets
+(AG fire, peatland) and the glacier behavior dataset. Also bring in an updated "PPE-informed, hand-tuned"
+parameter file, and turn on the Li2024 fire method. Also updates to allow creating transient landuse timeseries
+files going back to 1700. Fix an important bug on soil fields that had been there since ctsm5.2.0; this results
+in mksurfdata_esmf giving identical answers with a change in number of processors. Add in creation of
+ne0np4.POLARCAP.ne30x4 surface datasets.
+
+Asides: Remove VIC fields from surface datasets and testing. Add version to the surface datasets. Update the fire emissions factor dataset for the fire emissions testing in CTSM. Remove the --hires_pft option from mksurfdata_esmf as we don't have the datasets for it. Also delete mkprocdata_map.
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[X] clm6_0
+
+[X] clm5_1
+
+[X] clm5_0
+
+[X] ctsm5_0-nwp
+
+[X] clm4_5
+
+
+Bugs fixed
+----------
+
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+ Fixes #2723 -- Version string on CTSM surface datasets to check for compatibility
+ Partly addressed #2752 -- NEON surface datasets are defined in two places -- should just be one in namelist_defaults_ctsm.xml
+ Partly addressed #2672 -- Excess ice testing
+ Fixes #2720 -- Make polarcap surface datasets in ctsm5.3.0, remove VR 1850 datasets
+ Fixes #2452 -- Run mksurfdata_esmf with the new vegetation raw data
+ Fixes #2570 -- Historical configuration for CLM6
+ Fixes #2618 -- Update fire variables on surface dataset
+ Fixes #423 -- Disallow combining the 'melted' glacier ice runoff behavior with 'replaced_by_ice' melt behavior
+ Fixes #2734 -- Update fire emissions factors
+ Fixes #2744 -- Soil level clm surface input data for clm5.2.0 have missing values in large domains
+ Fixes #2502 -- fsurdat: PCT_SAND, PCT_CLAY, ORGANIC differ with different PE layouts on derecho
+ Fixes #2748 -- Update hillslope datasets for 5.3
+ Fixes #2773 -- Deprecate and remove mkprocdata_map
+
+Contributors
+------------
+ @slevis-lmwg @ekluzek @lawrencepj1 @wwieder @adrifoster @samsrabin @billsacks @lifang0209 @linniahawkins @olyson
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+ New surface datasets means initial condition files created without ctsm5.3.0 surface
+ datasets will need to be interpolated
+
+Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables):
+ With new surface datasets the following GLC fields have region "1" set to UNSET:
+ glacier_region_behavior, glacier_region_melt_behavior, glacier_region_ice_runoff_behavior
+ For mksurfdata_esmf, the --vic and --hires_pft options were removed
+
+Changes made to namelist defaults (e.g., changed parameter values):
+ glacier_region_behavior updated so that region ID 1 is UNSET
+
+Changes to the datasets (e.g., parameter, surface or initial files):
+ parameter file updated
+ surface datasets updated
+ f09 1850 finidat file updated (also used for 2000)
+ fire emissions factor file updated
+
+Changes to documentation:
+ Technical Note documentation on glacier region updated
+
+Notes of particular relevance for developers:
+---------------------------------------------
+
+Caveats for developers (e.g., code that is duplicated that requires double maintenance):
+ Glacier region 1 is now undefined on purpose
+
+Changes to tests or testing:
+ VIC tests removed
+
+Testing summary: release testing (regular ctsm_sci fates mosart rtm mksurfdata_esmf python)
+----------------
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS (1433 namelists differ compared to ctsm5.2.028)
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: Yes! New surface datasets so answers change for ALL simulations!
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: ALL
+ - what platforms/compilers: ALL
+ - nature of change (roundoff; larger than roundoff/same climate; new climate):
+ new climate
+
+ If this tag changes climate describe the run(s) done to evaluate the new
+ climate (put details of the simulations in the experiment database)
+ - casename: slevis ctsm52026_f09_pSASU
+
+ URL for LMWG diagnostics output used to validate new climate:
+ https://github.com/NCAR/LMWG_dev/issues/65
+
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+(https://github.com/ESCOMP/ctsm/pull)
+ #2500 -- ctsm5.3.0: New pft/lai/glc-behavior/soilcolor/fire datasets
+
+===============================================================
+===============================================================
Tag name: ctsm5.2.028
Originator(s): rgknox (Ryan Knox, LBNL, rgknox@lbl.gov)
glemieux (Gregory Lemieux, LBNL, glemieux@lbl.gov)
@@ -95,7 +550,7 @@ Bring b4b-dev branch to main CTSM development.
Update cs.status parsing script to make expected BASELINE fails more obvious
-Fix some issues with finding IC files for certain lnd_tuning_modes: all for cam7,
+Fix some issues with finding IC files for certain lnd_tuning_modes: all for cam7,
clm5_0_cam6.0, and clm6_0_cam6.0
Significant changes to scientifically-supported configurations
diff --git a/doc/ChangeSum b/doc/ChangeSum
index 45bab07440..1014612b40 100644
--- a/doc/ChangeSum
+++ b/doc/ChangeSum
@@ -1,5 +1,8 @@
Tag Who Date Summary
============================================================================================================================
+ ctsm5.3.001 multiple 09/26/2024 Merge b4b-dev
+ ctsm5.3.0 multiple 09/24/2024 Update surface datasets, CN Matrix, CLM60: excess ice on, explicit A/C on, crop calendars, Sturm snow, Leung dust emissions, prigent roughness data
+ ctsm5.2.029 multiple 09/24/2024 New surface datasets: double tag of ctsm5.3.0
ctsm5.2.028 rgknox 09/05/2024 FATES history flushing update
ctsm5.2.027 erik 08/28/2024 Merge b4b-dev
ctsm5.2.026 slevis 08/23/2024 Turn on excess ice, explicit AC, Leung_2023 for clm6
diff --git a/README.NUOPC_driver.md b/doc/README.NUOPC_driver.md
similarity index 100%
rename from README.NUOPC_driver.md
rename to doc/README.NUOPC_driver.md
diff --git a/doc/source/how-to-make-mesh.md b/doc/source/how-to-make-mesh.md
deleted file mode 100644
index 0620598e05..0000000000
--- a/doc/source/how-to-make-mesh.md
+++ /dev/null
@@ -1,98 +0,0 @@
-# Creating an ESMF mesh file from a netCDF file
-
-This gist includes instructions for creating and visualizing a mesh file from a netcdf file with valid 1D or 2D lats and lons coordinates.
-
-* **ESMF Mesh file** aka **Unstructured Grid File Format** is a netcdf file (format) that includes the information about the grids coordinates and their connectivity to each other.
-
-Additional information about ESMF mesh files are available [here](https://earthsystemmodeling.org/docs/release/ESMF_8_0_1/ESMF_refdoc/node3.html#SECTION03028200000000000000).
-
-------
-
-In this example, we will use `./mesh_maker.py` which uses `mesh_type.py` to create a mesh file and visualize it.
-
-1- First clone my fork and branch that includes these capabilities:
-``` Shell
-git clone https://github.com/negin513/ctsm.git ctsm_mesh
-cd ctsm_mesh
-
-git checkout subset_mesh_dask
-```
-
-2- Next run mesh_maker.py for a netcdf file:
-
-```
-cd tools/site_and_regional
-```
-Check all the avaialble options:
-
-```
-./mesh_maker.py --help
-```
-
-The output shows all available options for this script:
-```
-|------------------------------------------------------------------|
-|--------------------- Instructions -----------------------------|
-|------------------------------------------------------------------|
-This script creates ESMF unstructured GRID (mesh file) from a netcdf
-file with valid lats and lons. Provided lats and lons can be 1D or 2D.
-
-For example for running WRF-CTSM cases, the user can create a mesh
-file for their domain :
- ./mesh_maker.py --input wrfinput_d01 --output my_region
- --lat XLAT --lon XLONG --verbose
-
-optional arguments:
- -h, --help show this help message and exit
- --input INPUT Netcdf input file for creating ESMF mesh.
- --output OUTPUT Name of the ESMF mesh created.
- --outdir OUT_DIR Output directory (only if name of output mesh is not
- defined)
- --lat LAT_NAME Name of latitude varibale on netcdf input file. If none
- given, looks to find variables that include 'lat'.
- --lon LON_NAME Name of latitude varibale on netcdf input file. If none
- given, looks to find variables that include 'lon'.
- --mask MASK_NAME Name of mask varibale on netcdf input file. If none given,
- create a fake mask with values of 1.
- --area AREA_NAME Name of area variable on netcdf input file. If none given,
- ESMF calculates element areas automatically.
- --overwrite If meshfile exists, overwrite the meshfile.
- -v, --verbose Increase output verbosity
-
- ```
-
-Let's create a mesh file from a netcdf file with 1D lats and lons. On the sample files provided 1D lat and long coordinates are saved on `lsmlat` and `lsmlon` variables.
-
-```
-./mesh_maker.py --input /glade/scratch/negins/example_files/surfdata_4x5_hist_78pfts_CMIP6_simyr1850_275.0-330.0_-40-15_c220705.nc --output test_mesh_1d.nc --lat lsmlat --lon lsmlon --overwrite
-```
-`--verbose` option also provide additional information for debugging.
-
-This script will create regional and global mesh plots. For example for the above files, the plos are:
-test_mesh_1d_regional.png
-![image](https://user-images.githubusercontent.com/17344536/200441736-972a8136-5c05-4bc9-9bca-b498d972914a.png)
-
-
-test_mesh_1d_global.png
-
-![image](https://user-images.githubusercontent.com/17344536/200441753-d06e95d1-d85b-4216-9c23-d11ba89a31e4.png)
-
-
-
-------
- ## Creating Mesh files for a WRF domain:
-For running WRF-CTSM cases, we need to create ESMF mesh files for the WRF domain. We can create mesh file from wrfinput (wrf initial condition files). wrfinput has 2D coordinate information on `XLAT` and `XLONG` variable.
-
-For example, let's create a mesh file from a WRF input file for WRF-CTSM run.
- ```
-./mesh_maker.py --input /glade/scratch/negins/example_files/wrfinput_d01 --output test_mesh_wrf.nc --lat XLAT --lon XLONG --overwrite
-```
-
-This produce mesh files for running for our WRF domain.
-
-Here is how the regional plot looks like for this mesh file:
-
- ![image](https://user-images.githubusercontent.com/17344536/200442002-1ee5595c-9252-4934-a07c-2f6ad86aff1b.png)
-
-
-
\ No newline at end of file
diff --git a/doc/source/lilac/specific-atm-models/wrf-nesting.rst b/doc/source/lilac/specific-atm-models/wrf-nesting.rst
index f4c4570f2f..81428495f0 100644
--- a/doc/source/lilac/specific-atm-models/wrf-nesting.rst
+++ b/doc/source/lilac/specific-atm-models/wrf-nesting.rst
@@ -6,12 +6,9 @@
Using CTSM with WRF (Nested Model Runs)
========================================
-This section includes instructions on how to run WRF coupled with CTSM for a
-nested domain.
+This section includes instructions on how to run WRF coupled with CTSM for a nested domain.
-A nested domain is usually used to have a finer-resolution domain within the
-coarser model domain. A nested simulation enables running at a higher
-resolution over a smaller domain
+A nested domain is usually used to place a finer-resolution domain within the coarser model domain. A nested simulation enables running at a higher resolution over a smaller area.
.. note::
A nest should cover a portion of the parent domain and is fully contained by
@@ -37,9 +34,7 @@ There are currently two types of nesting available within WRF:
This example clarifies the workflow for running a nested WRF-CTSM case using one-way nesting with ``ndown.exe``.
The procedure for running a nested simulation for WRF with CTSM is
-similar to the workflow for running WRF real cases, except that it requires
-additional steps to (1) clone the CTSM repository, (2) build
-CTSM and LILAC, and (3) define namelist options reuired for CTSM.
+similar to the workflow for running WRF real cases, except that it requires additional steps to (1) clone the CTSM repository, (2) build CTSM and LILAC, and (3) define namelist options required for CTSM.
A full description of all steps for a WRF-CTSM run are included here.
@@ -50,26 +45,25 @@ A full description of all steps for a WRF-CTSM run are included here.
Therefore, we are not repeating the steps necessary for building WRF and
CTSM.
-In this example we use a nested domain over the CONUS as shows below:
+In this example we use a nested domain over the CONUS as shown below:
.. _Figure ctsm-ndown:
-.. figure:: ndown_ctsm_diagram.svg
-
- Flowchart for WRF-CTSM one-way nested simulations
+.. todo::
+ Replace missing ndown_ctsm_diagram.svg
+
+Flowchart for WRF-CTSM one-way nested simulations
Nested Simulations : Pre-processing (geogrid.exe)
-------------------------------------------------
-In the WPS/ directory, edit `namelist.wps` for a nested simulation over your
-desired domains. Make sure to change `max_dom=2`.
+In the WPS/ directory, edit ``namelist.wps`` for a nested simulation over your
+desired domains. Make sure to change ``max_dom=2``.
-First, use geogrid.exe to define the domain and interpolate static geographical data
-to the grids::
+First, use geogrid.exe to define the domain and interpolate static geographical data to the grids::
./geogrid.exe >& log.geogrid
-This step creates two files `geo_em.d01.nc` & `geo_em.d02.nc` which includes
-the domain definition for each domain.
+This step creates two files, ``geo_em.d01.nc`` and ``geo_em.d02.nc``, which include the domain definition for each domain.
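+
+A quick optional sanity check (assuming the standard netCDF ``ncdump``
+utility is available) is to inspect the nest file's parent-grid attributes::
+
+   ncdump -h geo_em.d02.nc | grep -i parent
+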
If the geogrid step finishes successfully, you should see the following message in the log file::
@@ -77,9 +71,10 @@ If the geogrid step finishes successfully, you should see the following message
! Successful completion of geogrid. !
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-The basic difference here with a non-nested case is the namelist.wps should
-have a column for each domain with `max_dom=2`. For example::
+The basic difference here from a non-nested case is that namelist.wps should have a column for each domain, with ``max_dom=2``. For example:
+::
+
&share
wrf_core = 'ARW',
max_dom = 2,
@@ -101,9 +96,7 @@ Therefore ``geogrid.exe`` creates two files corresponding to each domain.
Nested Simulations : Pre-processing (ungrib.exe)
-------------------------------------------------
-As mentioned previously, the purpose of the ungrib script is to unpack GRIB
-meteorological data and pack it into an intermediate file format.
-This step is exactly identical to a non-nested simulation.
+As mentioned previously, the purpose of the ungrib script is to unpack GRIB meteorological data and pack it into an intermediate file format. This step is identical to a non-nested simulation.
Run ungrib to get gribbed data into usable format to be ingested by WRF.
@@ -111,8 +104,7 @@ To run ungrib.exe, first link the GRIB data files that are going to be used::
./link_grib.csh $your_GRIB_data_path
-Based on your GRIB data type, link or copy the appropriate VTable to your WPS directory.
-WRF has some prepared VTable under ``/ungrib/Variable_tables/`` folder.
+Based on your GRIB data type, link or copy the appropriate VTable to your WPS directory. WRF has some prepared VTables under the ``/ungrib/Variable_tables/`` folder.
Extract meteorological fields from GRIB-formatted files::
@@ -128,21 +120,17 @@ At this point, you should see ungrib output (intermediate files) in your WPS dir
Nested Simulations : Pre-processing (metgrid.exe)
-------------------------------------------------
-Ensure that the `start_date` and `end_date` for domain two is set correctly for
-your simulation.
-Next, run ``metgrid.exe``::
+Ensure that the ``start_date`` and ``end_date`` for domain two are set correctly for your simulation. Next, run ``metgrid.exe``::
./metgrid.exe >& log.metgrid
-Check the metgrid log for the following message showing successful completion of
-metgrid step::
+Check the metgrid log for the following message showing successful completion of the metgrid step::
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! Successful completion of metgrid. !
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-Running metgrid for two domains will create files like
-below::
+Running metgrid for two domains will create files like the following::
met_em.d01.*
met_em.d02.*
@@ -150,25 +138,19 @@ below::
Nested Simulations : real.exe
------------------------------
-In this step, run ``real.exe`` to generate initial and boundary conditions for
-both domains.
+In this step, run ``real.exe`` to generate initial and boundary conditions for both domains.
In summary, complete the following steps:
Move or link WPS output files (``met_em.d01*`` and ``met_em.d02`` files) to your WRF test directory.
-Edit namelist.input for your WRF domain and desirable configurations.
-This should be the same domain as WPS namelist. Make sure you set ``max_dom =
-2,`` in the namelist.
+Edit namelist.input for your WRF domain and desired configuration. This should be the same domain as in the WPS namelist. Make sure you set ``max_dom = 2,`` in the namelist.
-To run WRF-CTSM, in your namelist change land-surface option to 6 for both
-domains::
+To run WRF-CTSM, change the land-surface option in your namelist to 6 for both domains::
sf_surface_physics = 6, 6,
-Run real.exe (if compiled parallel submit a batch job) to generate
-initail and boundary condition files for both domain.
-Make sure the following three files have been created in your directory::
+Run real.exe (if compiled in parallel, submit a batch job) to generate initial and boundary condition files for both domains. Make sure the following three files have been created in your directory::
wrfinput_d01
wrfinput_d02
@@ -178,6 +160,8 @@ The boundary condition file is only created for the outer domain.
Check the last line of the real log file for the following message:
+.. todo:: What message?
+
Rename wrfinput_d02
-------------------
Next, rename the ``wrfinput_d02`` file to ``wrfndi_d02``::
@@ -186,8 +170,7 @@ Next, rename the ``wrfinput_d02`` file to ``wrfndi_d02``::
Run ndown.exe
-------------
-In this step, we run ndown.exe to create initial and boundary condition for
-domain 2 based on the domain 1 (outer domain).
+In this step, we run ndown.exe to create initial and boundary conditions for domain 2 based on domain 1 (the outer domain).
Add the following into your namelist.input file under ``&time_control``::
@@ -197,20 +180,15 @@ Run ndown.exe to create ``wrfinput_d02`` and ``wrfbdy_d02``.
Run WRF for coarser domain
---------------------------
-In this step, run WRF for the outer domain.
-Make sure that ``max_dom = 1`` to run only for the coarser domain.
+In this step, run WRF for the outer domain. Make sure that ``max_dom = 1`` to run only for the coarser domain.
-This step is exactly identical as the previous example and only creates the
-``wrfout*`` files for the coarser domain.
+This step is identical to the previous example and only creates the ``wrfout*`` files for the coarser domain.
-Please make sure to copy ``lnd_in`` , ``lilac_in``, and ``lnd_modelio`` for the
-coarser domain in this directory.
+Please make sure to copy ``lnd_in``, ``lilac_in``, and ``lnd_modelio`` for the coarser domain into this directory.
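+
+For example (the coarse-case path is illustrative)::
+
+   cp /path/to/coarse_case/lnd_in .
+   cp /path/to/coarse_case/lilac_in .
+   cp /path/to/coarse_case/lnd_modelio .
+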
Create CTSM runtime files for the fine domain
---------------------------------------------
-This step is in addition creating CTSM runtime files for coarser domain which
-was explained here. For succesfully completing the previous step you should
-have already created these files for the coarser domain.
+This step is in addition to creating the CTSM runtime files for the coarser domain, which was explained earlier. To successfully complete the previous step you should have already created these files for the coarser domain.
.. seealso::
@@ -219,8 +197,7 @@ have already created these files for the coarser domain.
files for the finer domain you should follow the steps in section
:numref:`setting-ctsm-runtime-options`.
-Again, the goal here is to create files that determine CTSM runtime options which
-are defined within these three files:
+Again, the goal here is to create the files that determine CTSM runtime options, which are defined within these three files:
- ``lnd_in``: This is the main namelist input file for CTSM inner domain
@@ -230,13 +207,9 @@ are defined within these three files:
Run WRF for the finer domain
-----------------------------
-First, save (rename or move) the data from the coarser domain simulation
-(``wrfout_d01_*`` files).
-Next, rename ``wrfinput_d02`` and ``wrfbdy_d02`` to ``wrfinput_d01`` and ``wrfbdy_d01``, respectively.
+First, save (rename or move) the data from the coarser domain simulation (``wrfout_d01_*`` files). Next, rename ``wrfinput_d02`` and ``wrfbdy_d02`` to ``wrfinput_d01`` and ``wrfbdy_d01``, respectively.
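+
+For example (the output directory name is illustrative)::
+
+   mkdir -p coarse_run_output
+   mv wrfout_d01_* coarse_run_output/
+   mv wrfinput_d02 wrfinput_d01
+   mv wrfbdy_d02 wrfbdy_d01
+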
-Edit namelist.input, moving all of the fine-grid domain data from column 2 to column 1
-so that this run will be for the fine-grid domain only. Make sure you set
-`max_dom=1` and set your `time_step` based on the finer domain.
+Edit namelist.input, moving all of the fine-grid domain data from column 2 to column 1 so that this run will be for the fine-grid domain only. Make sure you set ``max_dom=1`` and set your ``time_step`` based on the finer domain.
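+
+For example, if the fine grid previously occupied column 2, its values become
+the only column (all values illustrative)::
+
+   max_dom   = 1,
+   time_step = 30,
+   e_we      = 202,
+   e_sn      = 202,
+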
.. note::
It may be beneficial to save namelist.input to something else prior to this step in case you need to repeat this
diff --git a/doc/source/lilac/specific-atm-models/wrf-tools.rst b/doc/source/lilac/specific-atm-models/wrf-tools.rst
index f67a05ea0a..1222a16f10 100644
--- a/doc/source/lilac/specific-atm-models/wrf-tools.rst
+++ b/doc/source/lilac/specific-atm-models/wrf-tools.rst
@@ -48,10 +48,16 @@ is described in here.
../../../configure --macros-format Makefile --mpilib mpi-serial
+.. todo::
+ Update the below, as domain files aren't needed with nuopc.
+
5. Generate CTSM domain files using ``get_domain`` tool::
./gen_domain -m /glade/work/$USER/ctsm/nldas_grid/scrip/wrf2clm_mapping_noneg.nc -o wrf2clm_ocn_noneg -l wrf2clm_lnd_noneg
+.. todo::
+ Update the below, as ``mksurfdata.pl`` no longer exists.
+
6. Create surface datasets in ``tools/mksurfdata_esmf``::
./mksurfdata.pl -res usrspec -usr_gname "nldas" -usr_gdate "190124" -usr_mapdir "/glade/work/$USER/ctsm/nldas_grid/map" -y 2000 -exedir "/glade/u/home/$USER/src/ctsm/ctsm_surfdata/tools/mksurfdata_esmf" -no-crop
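+
+.. note::
+   A rough sketch of the replacement workflow (the options shown below are
+   assumptions; see ``tools/mksurfdata_esmf/README.md`` for the current
+   steps)::
+
+      cd tools/mksurfdata_esmf
+      ./gen_mksurfdata_namelist --res <your_resolution> --start-year 2000 --end-year 2000
+      ./gen_mksurfdata_jobscript_single --number-of-nodes 1 --tasks-per-node 128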
diff --git a/doc/source/tech_note/Glacier/CLM50_Tech_Note_Glacier.rst b/doc/source/tech_note/Glacier/CLM50_Tech_Note_Glacier.rst
index 3e510561bb..9ed8fe280e 100644
--- a/doc/source/tech_note/Glacier/CLM50_Tech_Note_Glacier.rst
+++ b/doc/source/tech_note/Glacier/CLM50_Tech_Note_Glacier.rst
@@ -58,7 +58,7 @@ It is also possible to run CESM with an evolving ice sheet. In this case, CLM re
Glacier regions and their behaviors
-----------------------------------
-The world's glaciers and ice sheets are broken down into a number of different regions (four by default) that differ in three respects:
+The world's glaciers and ice sheets are broken down into a number of different regions (three by default) that differ in three respects:
#. Whether the gridcell's glacier land unit contains:
@@ -80,7 +80,7 @@ The world's glaciers and ice sheets are broken down into a number of different r
b. Ice runoff from snow capping is melted (generating a negative sensible heat flux) and runs off as liquid. This matches the behavior for non-glacier columns. This is appropriate in regions that have little iceberg calving in reality. This can be important to avoid unrealistic cooling of the ocean and consequent runaway sea ice growth.
-The default behaviors for the world's glacier and ice sheet regions are described in :numref:`Table Glacier region behaviors`. Note that the standard CISM grid covers Greenland plus enough surrounding area to allow for ice sheet growth and to have a regular rectangular grid. We need to have the "replaced by ice" melt behavior within the CISM domain in order to compute SMB there, and we need virtual elevation classes in that domain in order to compute SMB for all elevation classes and to facilitate glacial advance and retreat in the two-way-coupled case. However, this domain is split into Greenland itself and areas outside Greenland so that ice runoff in the Canadian archipelago (which is inside the CISM domain) is melted before reaching the ocean, to avoid runaway sea ice growth in that region.
+The default behaviors for the world's glacier and ice sheet regions are described in :numref:`Table Glacier region behaviors`. Note that the Greenland region stops at the edge of Greenland as defined by CISM. This means that, by default, SMB is not computed for grid cells outside Greenland but within the CISM domain. (This treatment of the non-Greenland portion of the CISM domain as being the same as the world's mountain glaciers rather than like Greenland itself is mainly for the sake of avoiding unrealistic fluxes from the Canadian archipelago that can potentially result in runaway sea ice growth in that region.)
.. _Table Glacier region behaviors:
@@ -93,13 +93,6 @@ The default behaviors for the world's glacier and ice sheet regions are describe
| Greenland | Virtual | Replaced by | Remains ice |
| | | ice | |
+---------------+---------------+---------------+---------------+
- | Inside | Virtual | Replaced by | Melted |
- | standard CISM | | ice | |
- | grid but | | | |
- | outside | | | |
- | Greenland | | | |
- | itself | | | |
- +---------------+---------------+---------------+---------------+
| Antarctica | Multiple | Replaced by | Remains ice |
| | | ice | |
+---------------+---------------+---------------+---------------+
@@ -109,7 +102,11 @@ The default behaviors for the world's glacier and ice sheet regions are describe
.. note::
- In regions that have both the ``Glacial melt = Replaced by ice`` and the ``Ice runoff = Melted`` behaviors (by default, this is just the region inside the standard CISM grid but outside Greenland itself): During periods of glacial melt, a negative ice runoff is generated (due to the ``Glacial melt = Replaced by ice`` behavior); this negative ice runoff is converted to a negative liquid runoff plus a positive sensible heat flux (due to the ``Ice runoff = Melted`` behavior). We recommend that you limit the portion of the globe with both of these behaviors combined, in order to avoid having too large of an impact of this non-physical behavior.
+   It is possible to have non-virtual, non-SMB-computing areas within the CISM domain (as is the case for the portion of CISM's Greenland domain outside of Greenland itself). However, these areas will send 0 SMB and will not be able to adjust to CISM-dictated changes in glacier area. Therefore, it is best to set up the glacier regions and their behaviors so that as much of the CISM domain as possible is covered by virtual, SMB-computing areas.
+
+.. note::
+
+ The combination of the ``Glacial melt = Replaced by ice`` and the ``Ice runoff = Melted`` behaviors results in particularly non-physical behavior: During periods of glacial melt, a negative ice runoff is generated (due to the ``Glacial melt = Replaced by ice`` behavior); this negative ice runoff is converted to a negative liquid runoff plus a positive sensible heat flux (due to the ``Ice runoff = Melted`` behavior). The net result is zero runoff but a positive sensible heat flux generated from glacial melt. Because of how physically unrealistic this is, CLM does not allow this combination of behaviors.
.. _Multiple elevation class scheme:
diff --git a/doc/source/users_guide/adding-new-resolutions/Adding-New-Resolutions-or-New-Files-to-the-build-namelist-Database.rst b/doc/source/users_guide/adding-new-resolutions/Adding-New-Resolutions-or-New-Files-to-the-build-namelist-Database.rst
index 78edaaf629..5b22f8a706 100644
--- a/doc/source/users_guide/adding-new-resolutions/Adding-New-Resolutions-or-New-Files-to-the-build-namelist-Database.rst
+++ b/doc/source/users_guide/adding-new-resolutions/Adding-New-Resolutions-or-New-Files-to-the-build-namelist-Database.rst
@@ -6,9 +6,9 @@
Adding New Resolutions
========================
-In the last chapter we gave the details on how to create new files for input into CLM. These files could be either global resolutions, regional-grids or even a single grid point. If you want to easily have these files available for continued use in your development you will then want to include them in the build-namelist database so that build-namelist can easily find them for you. You can deal with them, just by putting the settings in the ``user_nl_clm namelist`` file, or by using ``CLM_USRDAT_NAME``. Another way to deal with them is to enter them into the database for build-namelist, so that build-namelist can find them for you. This keeps one central database for all your files, rather than having multiple locations to keep track of files. If you have a LOT of files to keep track of it also might be easier than keeping track by hand, especially if you have to periodically update your files. If you just have a few quick experiments to try, for a short time period you might be best off using the other methods mentioned above.
+In the last chapter we gave the details on how to create new files for input into CLM. These files could be global resolutions, regional grids, or even a single grid point. If you want to easily have these files available for continued use in your development, you will then want to include them in the build-namelist database so that build-namelist can easily find them for you. You can deal with them just by putting the settings in the ``user_nl_clm`` namelist file, or by using ``CLM_USRDAT_NAME``. Another way to deal with them is to enter them into the database for build-namelist, so that build-namelist can find them for you. This keeps one central database for all your files, rather than having multiple locations to keep track of files. If you have a LOT of files to keep track of, it also might be easier than keeping track by hand, especially if you have to periodically update your files. If you just have a few quick experiments to try for a short time period, you might be best off using the other methods mentioned above.
-There are two parts to adding files to the build-namelist database. The first part is adding new resolution names which is done in the ``$CTSMROOT/bld/namelist_files/namelist_definition_clm4_5.xml`` file. You can then use the new resolution by using ``CLM_USRDAT_NAME``. If you also want to be able to give the resolution into **create_newcase** -- you'll need to add the grid to the ``$CIMEROOT/config/cesm/config_grid.xml`` file.
+There are two parts to adding files to the build-namelist database. The first part is adding new resolution names which is done in the ``$CTSMROOT/bld/namelist_files/namelist_definition_ctsm.xml`` file. You can then use the new resolution by using ``CLM_USRDAT_NAME``. If you also want to be able to give the resolution to ``$CTSMROOT/cime/scripts/create_newcase`` -- you'll need to add the grid to the ``$CIMEROOT/config/cesm/config_grid.xml`` file.
-The second part is actually adding the new filenames which is done in the ``$CTSMROOT/bld/namelist_files/namelist_defaults_clm4_5.xml`` file (``$CTSMROOT/bld/namelist_files/namelist_defaults_clm4_5_tools.xml`` file for CLM tools). If you aren't adding any new resolutions, and you are just changing the files for existing resolutions, you don't need to edit the namelist_definition file.
+The second part is actually adding the new filenames, which is done in the ``$CTSMROOT/bld/namelist_files/namelist_defaults_ctsm.xml`` file. If you aren't adding any new resolutions, and you are just changing the files for existing resolutions, you don't need to edit the namelist_definition file.
diff --git a/doc/source/users_guide/adding-new-resolutions/Adding-Resolution-Names.rst b/doc/source/users_guide/adding-new-resolutions/Adding-Resolution-Names.rst
index 216de19f54..ea10a3c409 100644
--- a/doc/source/users_guide/adding-new-resolutions/Adding-Resolution-Names.rst
+++ b/doc/source/users_guide/adding-new-resolutions/Adding-Resolution-Names.rst
@@ -6,7 +6,7 @@
Adding Resolution Names
=========================
-If you are adding files for new resolutions which aren't covered in the namelist_definition file -- you'll need to add them in. The list of valid resolutions is in the id="res" entry in the ``$CTSMROOT/bld/namelist_files/namelist_definition_clm4_5.xml`` file. You need to choose a name for your new resolution and simply add it to the comma delimited list of valid_values for the id="res" entry. The convention for global Gaussian grids is number_of_latitudes x number_of_longitudes. The convention for global finite volume grids is latitude_grid_size x longitude_grid_size where latitude and longitude is measured in degrees. The convention for unstructured HOMME grids is nenp4, where corresponds to the resolution. The higher is the higher the resolution. So for example, ne60np4 is roughly half-degree while ne240np4 is roughly a eighth degree. For regional or single-point datasets the names have a grid size number_of_latitudes x number_of_longitudes followed by an underscore and then a descriptive name such as a City name followed by an abbreviation for the Country in caps. The only hard requirement is that names be unique for different grid files. Here's what the entry for resolutions looks like in the file:
+If you are adding files for new resolutions which aren't covered in the namelist_definition file -- you'll need to add them in. The list of valid resolutions is in the ``id="res"`` entry in the ``$CTSMROOT/bld/namelist_files/namelist_definition_ctsm.xml`` file. You need to choose a name for your new resolution and simply add it to the comma-delimited list of valid_values for the ``id="res"`` entry. The convention for global Gaussian grids is number_of_latitudes x number_of_longitudes. The convention for global finite volume grids is latitude_grid_size x longitude_grid_size, where latitude and longitude are measured in degrees. The convention for unstructured HOMME grids is ne<number>np4, where <number> corresponds to the resolution; the higher the number, the higher the resolution. So, for example, ne60np4 is roughly half-degree while ne240np4 is roughly an eighth degree. For regional or single-point datasets the names have a grid size number_of_latitudes x number_of_longitudes followed by an underscore and then a descriptive name, such as a city name followed by an abbreviation for the country in caps. The only hard requirement is that names be unique for different grid files. Here's what the entry for resolutions looks like in the file:
::
-lnd/clm2/surfdata_map/surfdata_0.9x1.25_78pfts_CMIP6_simyr1850_c170824.nc
-
-```
+::
+
+
+ lnd/clm2/surfdata_map/surfdata_0.9x1.25_78pfts_CMIP6_simyr1850_c170824.nc
+
-Other ``fsurdat`` files are distinguished from this one by their resolution (hgrid), simulation year (sim_year) and prognostic crop (use_crop) attributes.
-To add or change the default filenames for CLM tools edit the ``$CTSMROOT/bld/namelist_files/namelist_defaults_|version|_tools.xml`` and either change an existing filename or add a new one. Editing this file is similar to the ``namelist_defaults_clm4_5.xml`` talked about above.
+Other ``fsurdat`` files are distinguished from this one by their resolution (``hgrid``), simulation year (``sim_year``) and prognostic crop (``use_crop``) attributes.
----------------------------
What are the required files?
@@ -30,4 +29,5 @@ In the following table we list the different files used by CLM, they are listed
Table 3-1. Required Files for Different Configurations and Simulation Types
---------------------------------------------------------------------------
-Insert table 3-1
+.. todo::
+ Insert table 3-1
diff --git a/doc/source/users_guide/index.rst b/doc/source/users_guide/index.rst
index 75a2949bec..0bb2dbd75d 100644
--- a/doc/source/users_guide/index.rst
+++ b/doc/source/users_guide/index.rst
@@ -24,8 +24,8 @@
adding-new-resolutions/index.rst
running-special-cases/index.rst
running-single-points/index.rst
- running-PTCLM/index.rst
trouble-shooting/index.rst
testing/index.rst
+ using-mesh-maker/index.rst
Documentation was built: |today|
diff --git a/doc/source/users_guide/overview/getting-help.rst b/doc/source/users_guide/overview/getting-help.rst
index 74765d1ad0..7c33071db9 100644
--- a/doc/source/users_guide/overview/getting-help.rst
+++ b/doc/source/users_guide/overview/getting-help.rst
@@ -146,9 +146,6 @@ MOSART
PFT
Plant Function Type (PFT). A type of vegetation that CLM parameterizes.
-PTCLM
- PoinT CLM (PTCLM) a python script that operates on top of CLM for |version| to run single point simulations for CLM.
-
ROF
River runOff Model to route flow of surface water over land out to the ocean. |cesmrelease| has two components options for this
the new model MOSART and previous model RTM.
diff --git a/doc/source/users_guide/overview/introduction.rst b/doc/source/users_guide/overview/introduction.rst
index bc7c1fd82c..8056e6ab16 100644
--- a/doc/source/users_guide/overview/introduction.rst
+++ b/doc/source/users_guide/overview/introduction.rst
@@ -60,9 +60,7 @@ As a followup to the tools chapter, :ref:`adding-new-resolutions-section` tells
In :ref:`running-special-cases-section`, again for the expert user, we give details on how to do some particularly difficult special cases. For example, we give the protocol for spinning up the |version|-BGC and CLMCN models as well as CLM with dynamic vegetation active (CNDV). We give instructions to do a spinup case from a previous case with Coupler history output for atmospheric forcing. We also give instructions on running both the prognostic crop and irrigation models. Lastly we tell the user how to use the DATM model to send historical CO2 data to CLM.
-:ref:`running-single-points` outlines how to do single-point or regional simulations using |version|. This is useful to either compare |version| simulations with point observational stations, such as tower sites (which might include your own atmospheric forcing), or to do quick simulations with CLM for example to test a new parameterization. There are several different ways given on how to perform single-point simulations which range from simple PTS_MODE to more complex where you create all your own datasets, tying into :ref:`using-clm-tools-section` and also :ref:`adding-new-resolutions-section` to add the files into the build-namelist XML database. The PTCLM python script to run single-point simulations was added back in for this release (but it has bugs that don't allow it to work out of the box). CLM4 in CESM1.0.5 has a fully working versions of PTCLM.
-
-Need :ref:`running-PTCLM` blurb...
+:ref:`running-single-points` outlines how to do single-point or regional simulations using |version|. This is useful either to compare |version| simulations with point observational stations, such as tower sites (which might include your own atmospheric forcing), or to do quick simulations with CLM, for example to test a new parameterization. There are several different ways to perform single-point simulations, ranging from simple PTS_MODE to more complex setups where you create all your own datasets, tying into :ref:`using-clm-tools-section` and also :ref:`adding-new-resolutions-section` to add the files into the build-namelist XML database.
:ref:`troubleshooting-index` gives some guidance on trouble-shooting problems when using |version|. It doesn't cover all possible problems with CLM, but gives you some guidelines for things that can be done for some common problems.
@@ -109,7 +107,7 @@ The README (which can be found in ``$CTSMROOT/doc``) is repeated here.
A CTSM versus a CESM checkout
=============================
-The directory structure for |version| is different depending on if it's checked out from |release| or |cesmrelease|. If |version| is checked out from |ctsm_gh| the CLM source code is directly under the top level directory. If |cesmrelease| is checkout out from |cesm_gh| then the CLM source directories are under "components/clm" from the top level directory. We will refer to this directory for the CLM source directories in the User's Guide as "$CTSMROOT".
+The directory structure for |version| is different depending on whether it's checked out from |release| or |cesmrelease|. If |version| is checked out from |ctsm_gh|, the CLM source code is directly under the top-level directory. If |cesmrelease| is checked out from |cesm_gh|, then the CLM source directories are under ``components/clm`` from the top-level directory. We will refer to this directory for the CLM source directories in the User's Guide as ``$CTSMROOT``.
.. _how-to-use-this-document:
diff --git a/doc/source/users_guide/overview/quickstart.rst b/doc/source/users_guide/overview/quickstart.rst
index 5414963c4d..63f4afea6f 100644
--- a/doc/source/users_guide/overview/quickstart.rst
+++ b/doc/source/users_guide/overview/quickstart.rst
@@ -16,7 +16,7 @@ List of utilities required for CESM in the `Software/OS Prerequisites `_. Once you are familiar with how to setup cases for any type of simulation with CESM you will want to direct your attention to the specifics of using CLM.
-For some of the details of setting up cases for |version| read the README and text files available from the "$CTSMROOT/doc" directory (see the "CLM Web pages" section for a link to the list of these files). Here are the important ones that you should be familiar with:
+For some of the details of setting up cases for |version| read the README and text files available from the ``$CTSMROOT/doc`` directory (see the "CLM Web pages" section for a link to the list of these files). Here are the important ones that you should be familiar with:
- :ref:`readme` describing the directory structure.
- The IMPORTANT_NOTES file talks about important things for users to know about using the model scientifically. It content is given in the next chapter on :ref:`scientific-validiation`.
diff --git a/doc/source/users_guide/running-PTCLM/adding-ptclm-site-data.rst b/doc/source/users_guide/running-PTCLM/adding-ptclm-site-data.rst
deleted file mode 100644
index b95831427f..0000000000
--- a/doc/source/users_guide/running-PTCLM/adding-ptclm-site-data.rst
+++ /dev/null
@@ -1,88 +0,0 @@
-.. include:: ../substitutions.rst
-
-.. _adding-ptclm-site-data:
-
-============================
-Adding PTCLMmkdata Site Data
-============================
-
-The "sitegroupname" option to PTCLMmkdata looks for groups of sites in the files in the ``PTCLM_sitedata`` directory under the PTCLMmkdata directory. You can add new names available for this option including your own lists of sites, by adding more files in this directory. There are three files for each "sitegroupname": ``$SITEGROUP_sitedata.txt``, ``$SITEGROUP_soildata.txt`` and ``$SITEGROUP_pftdata.txt`` (where ``$SITEGROUP`` is the name that would be entered as "sitegroupname" to PTCLMmkdata). Each file needs to have the same list of sites, but gives different information: site data, PFT data, and soil data respectively. Although the site codes need to be the same between the three files, the files do NOT have to be in the same order. Each file has a one-line header that lists the contents of each column which are separated by commas. The first column for each of the files is the "site_code" which must be consistent between the three files. The site code can be any unique character string, but in general we use the AmeriFlux site code.
-
-Site data file:`` $SITEGROUP_sitedata.txt``): The header for this file is:
-::
-
- site_code,name,state,lon,lat,elev,startyear,endyear,alignyear
-
-The columns: name, state, and elevation are informational only. Name is a longer descriptive name of the site, and state is the state for U.S. sites or country for non U.S. sites. The columns: lon and lat are the longitude and latitude of the location in decimal degrees. The last three columns are the start and ending year for the data and the align year for an 1850 case for the data. The align year is currently unused.
-
-Soil data file: ``$SITEGROUP_soildata.txt``): The header for this file is:
-::
-
- site_code,soil_depth,n_layers,layer_depth,layer_sand%,layer_clay%
-
-The first three fields after "site_code" are currently unused. The only two that are used are the percent sand and clay columns to set the soil texture.
-
-PFT data file: ``$SITEGROUP_pftdata.txt```): The header for this file is:
-::
-
- site_code,pft_f1,pft_c1,pft_f2,pft_c2,pft_f3,pft_c3,pft_f4,pft_c4,pft_f5,pft_c5
-
-This file gives the vegetation coverage for the different vegetation types for the site. The file only supports up to five PFT's at the same time. The columns with "pft_f" are the fractions for each PFT, and the columns with "pft_c" is the integer index of the given PFT. Look at the pft-physiology file to see what the PFT index for each PFT type is.
-
-----------------------------------------------------
-Dynamic Land-Use Change Files for use by PTCLMmkdata
-----------------------------------------------------
-
-There is a mechanism for giving site-specific land-use change in PTCLMmkdata. Adding site specific files to the ``PTCLM_sitedata`` directory under PTCLMmkdata allows you to specify the change in vegetation and change in harvesting (for the CN model) for that site. Files are named: ``$SITE_dynpftdata.txt``. There is a sample file for the US-Ha1 site called: ``US-Ha1_dynpftdata.txt``. The file has a one-line header with the information that the file has, and then one-line for each year with a transition. The header line is as follows:
-::
-
- trans_year,pft_f1,pft_c1,pft_f2,pft_c2,pft_f3,pft_c3,pft_f4,pft_c4,pft_f5,pft_c5,har_vh1,har_vh2,har_sh1,har_sh2,har_sh3,graze,hold_harv,hold_graze
-
-This file only requires a line for each year where a transition or harvest happens. As in the "pftdata" file above "pft_f" refers to the fraction and "pft_c" refers to the PFT index, and only up to five vegetation types are allowed to co-exist. The last eight columns have to do with harvesting and grazing. The last two columns are whether to hold harvesting and/or grazing constant until the next transition year and will just be either 1 or 0. This file will be converted by the **PTCLM_sitedata/cnvrt_trnsyrs2_pftdyntxtfile.pl** script in the PTCLMmkdata directory to a format that **mksurfdata_esmf** can read that has an entry for each year for the range of years valid for the compset in question.
-
-.. _converting-ameriflux-for-ptclmmkdata:
-
-------------------------------------------------
-Converting AmeriFlux Data for use by PTCLMmkdata
-------------------------------------------------
-
-AmeriFlux data comes in comma separated format and is available from: `http://public.ornl.gov/ameriflux/dataproducts.shtml `_. Before you download the data you need to agree to the usage terms.
-
-Here is a copy of the usage terms from the web-site on June/13/2011.
-
-"The AmeriFlux data provided on this site are freely available and were furnished by individual AmeriFlux scientists who encourage their use. Please kindly inform the appropriate AmeriFlux scientist(s) of how you are using the data and of any publication plans. Please acknowledge the data source as a citation or in the acknowledgments if the data are not yet published. If the AmeriFlux Principal Investigators (PIs) feel that they should be acknowledged or offered participation as authors, they will let you know and we assume that an agreement on such matters will be reached before publishing and/or use of the data for publication. If your work directly competes with the PI's analysis they may ask that they have the opportunity to submit a manuscript before you submit one that uses unpublished data. In addition, when publishing, please acknowledge the agency that supported the research. Lastly, we kindly request that those publishing papers using AmeriFlux data provide preprints to the PIs providing the data and to the data archive at the Carbon Dioxide Information Analysis Center (CDIAC)."
-
-The above agreement applies to the "US-UMB" dataset imported into our repository as well, and Gil Bohrer is the PI on record for that dataset.
-
-The CESM can NOT handle missing data, so we recommend using the "Level 4" Gap filled datasets. The fields will also need to be renamed. The "WS" column becomes "WIND", "PREC" becomes "PRECmms", "RH" stays as "RH", "TA" becomes "TBOT", "Rg" becomes "FSDS", "Rgl" becomes "FLDS", "PRESS" becomes "PSRF". "ZBOT" can just be set to the constant of "30" (m). The units of Temperature need to be converted from "Celsius" to "Kelvin" (use the value in ``SHR_CONST_TKFRZ`` in the file ``models/csm_share/shr/shr_const.F90`` of ``273.15``. The units of Pressure also need to be converted from "kPa" to "Pa". LATIXY, and LONGXY should also be set to the latitude and longitude of the site.
-
------------------------------------------------------------------
-Example: PTCLMmkdata transient example over a shorter time period
------------------------------------------------------------------
-
-This is an example of using PTCLMmkdata for Harvard Forest (AmeriFlux site code US-Ha1) for transient land use 1991-2006. In order to do this we would've needed to have converted the AmeriFlux data into NetCDF format as shown in :ref:`converting-ameriflux-for-ptclmmkdata` section above. Also note that this site has a site-specific dynamic land-use change file for it ``PTCLM_sitedata/US-Ha1_dynpftdata.txt`` in the PTCLMmkdata directory and this file will be used for land-use change and harvesting rather than the global dataset.
-
-::
-
- > cd $CTSMROOT/tools/PTCLM
- # We are going to use forcing data over 1991 to 2006, but we need to start with
- # a transient compset to do so, so we use the 20th Century transient: 1850-2000
- # Note: When creating the fpftdyn dataset for this site it will use the
- # PTCLM_sitedata/US-Ha1_dynpftdata.txt
- # file for land-use change and harvesting
- > ./PTCLMmkdata -s US-Ha1 -d $MYCSMDATA --sitegroupname AmeriFlux
- > mkdir $MYCSMDATA/atm/datm7/CLM1PT_data/1x1pt_US-Ha1
- > cd $MYCSMDATA/atm/datm7/CLM1PT_data/1x1pt_US-Ha1
- # Copy data in NetCDF format to this directory, filenames should be YYYY-MM.nc
- # The fieldnames on the file should be:
- # FLDS,FSDS,LATIXY, LONGXY, PRECTmms,PSRF,RH,TBOT,WIND,ZBOT
- # With units
- # W/m2,W/m2,degrees_N,degrees_E,mm/s, Pa, %, K, m/s, m
- # The time coordinate units should be: days since YYYY-MM-DD 00:00:00
- > cd ../../../../../US-Ha1_I20TRCRUCLM45BGC
- # Now we need to set the start date to 1991, and make sure the align year is for 1991
- > ./xmlchange RUN_STARTDATE=1991-01-01,DATM_CLMNCEP_YR_ALIGN=1991
- # Similarly for Nitrogen deposition data we cycle over: 1991 to 2006
- > cat << EOF >> user_nl_clm
- model_year_align_ndep=1991,stream_year_first_ndep=1991,stream_year_last_ndep=2006
- EOF
diff --git a/doc/source/users_guide/running-PTCLM/introduction-to-ptclm.rst b/doc/source/users_guide/running-PTCLM/introduction-to-ptclm.rst
deleted file mode 100644
index 9ae4186d2c..0000000000
--- a/doc/source/users_guide/running-PTCLM/introduction-to-ptclm.rst
+++ /dev/null
@@ -1,135 +0,0 @@
-.. _introduction-to-ptclm.rst:
-
-.. include:: ../substitutions.rst
-
-.. _what-is-ptclm:
-
-=====================
- What is PTCLMmkdata?
-=====================
-
-PTCLMmkdata (pronounced Pee-Tee Cee-L-M make data is a Python script to help you set up PoinT CLM simulations.
-
-It runs the CLM tools for you to get datasets set up, and copies them to a location you can use them including the changes needed for a case to use the dataset with namelist and XML changes.
-
-Then you run **create_newcase** and point to the directory so that the namelist and XML changes are automatically applied.
-
-PTCLMmkdata has a simple ASCII text file for storing basic information for your sites.
-
-We also have complete lists for AmeriFlux and Fluxnet-Canada sites, although we only have the meteorology data for one site.
-
-For other sites you will need to obtain the meteorology data and translate it to a format that the CESM datm model can use.
-
-But, even without meteorology data PTCLMmkdata is useful to setup datasets to run with standard ``CLM_QIAN`` data.
-
-The original authors of PTCLMmkdata are: Daniel M. Ricciuto, Dali Wang, Peter E. Thornton, Wilfred M. Post all at Environmental Sciences Division, Oak Ridge National Laboratory (ORNL) and R. Quinn Thomas at Cornell University. It was then modified fairly extensively by Erik Kluzek at NCAR. We want to thank all of these individuals for this contribution to the CESM effort. We also want to thank the folks at University of Michigan Biological Stations (US-UMB) who allowed us to use their Fluxnet station data and import it into our inputdata repository, especially Gil Bohrer the PI on record for this site.
-
-.. _details-of-ptclm:
-
-=======================
- Details of PTCLMmkdata
-=======================
-
-To get help on PTCLM2_180611 use the "--help" option as follows.
-::
-
- > cd $CTSMROOT/tools/PTCLM
- > ./PTCLMmkdata --help
-
-The output to the above command is as follows:
-::
-
- Usage: PTCLM.py [options] -d inputdatadir -m machine -s sitename
-
- Python script to create cases to run single point simulations with tower site data.
-
- Options:
- --version show program's version number and exit
- -h, --help show this help message and exit
-
- Required Options:
- -d CCSM_INPUT, --csmdata=CCSM_INPUT
- Location of CCSM input data
- -m MYMACHINE, --machine=MYMACHINE
- Machine, valid CESM script machine (-m list to list valid
- machines)
- -s MYSITE, --site=MYSITE
- Site-code to run, FLUXNET code or CLM1PT name (-s list to list
- valid names)
-
- Configure and Run Options:
- -c MYCOMPSET, --compset=MYCOMPSET
- Compset for CCSM simulation (Must be a valid 'I' compset [other
- than IG compsets], use -c list to list valid compsets)
- --coldstart Do a coldstart with arbitrary initial conditions
- --caseidprefix=MYCASEID
- Unique identifier to include as a prefix to the case name
- --cesm_root=BASE_CESM
- Root CESM directory (top level directory with models and scripts
- subdirs)
- --debug Flag to turn on debug mode so won't run, but display what would
- happen
- --finidat=FINIDAT Name of finidat initial conditions file to start CLM from
- --list List all valid: sites, compsets, and machines
- --namelist=NAMELIST
- List of namelist items to add to CLM namelist (example:
- --namelist="hist_fincl1='TG',hist_nhtfrq=-1"
- --QIAN_tower_yrs Use the QIAN forcing data year that correspond to the tower
- years
- --rmold Remove the old case directory before starting
- --run_n=MYRUN_N Number of time units to run simulation
- --run_units=MYRUN_UNITS
- Time units to run simulation (steps,days,years, etc.)
- --quiet Print minimul information on what the script is doing
- --sitegroupname=SITEGROUP
- Name of the group of sites to search for you selected site in
- (look for prefix group names in the PTCLM_sitedata directory)
- --stdurbpt If you want to setup for standard urban namelist settings
- --useQIAN use QIAN input forcing data instead of tower site meterology data
- --verbose Print out extra information on what the script is doing
-
- Input data generation options:
- These are options having to do with generation of input datasets. Note: When
- running for supported CLM1PT single-point datasets you can NOT generate new
- datasets. For supported CLM1PT single-point datasets, you MUST run with the
- following settings: --nopointdata And you must NOT set any of these: --soilgrid
- --pftgrid --owritesrf
-
- --nopointdata Do NOT make point data (use data already created)
- --owritesrf Overwrite the existing surface datasets if they exist (normally
- do NOT recreate them)
- --pftgrid Use pft information from global gridded file (rather than site
- data)
- --soilgrid Use soil information from global gridded file (rather than site
- data)
-
- Main Script Version Id: $Id: PTCLM.py 47576 2013-05-29 19:11:16Z erik $ Scripts URL: $HeadURL: https://svn-ccsm-models.cgd.ucar.edu/PTCLM/trunk_tags/PTCLM1_130529/PTCLM.py $:
-
-Here we give a simple example of using PTCLMmkdata for a straightforward case of running at the US-UMB Fluxnet site on cheyenne where we already have the meteorology data on the machine. Note, see :ref:`converting-ameriflux-for-ptclmmkdata` for permission information to use this data.
-
-Example 6-1. Example of running PTCLMmkdata for US-UMB on cheyenne
-------------------------------------------------------------------
-::
-
- > setenv CSMDATA $CESMDATAROOT/inputdata
- > setenv MYDATAFILES `pwd`/mydatafiles
- > setenv SITE US-UMB
- > setenv MYCASE testPTCLM
-
- # Next build all of the clm tools you will need
- > cd $CTSMROOT/tools/PTCLM
- > buildtools
- # next run PTCLM (NOTE -- MAKE SURE python IS IN YOUR PATH)
- > cd $CTSMROOT/tools/PTCLM
- # Here we run it using qcmd so that it will be run on a batch node
- > qcmd -- ./PTCLMmkdata --site=$SITE --csmdata=$CSMDATA --mydatadir=$MYDATAFILES >& ptclmrun.log &
- > cd $CIMEROOT/scripts
- > ./create_newcase --user-mods-dir $MYDATAFILES/1x1pt_$SITE --case $MYCASE --res CLM_USRDAT --compset I1PtClm50SpGs
- # Next setup, build and run as normal
- > cd $MYCASE
- > ./case.setup
-
-PTCLMmkdata includes a README file that gives some extra details and a simple example.
-
-.. include:: ../../../../tools/PTCLM/README
- :literal:
diff --git a/doc/source/users_guide/running-PTCLM/ptclm-examples.rst b/doc/source/users_guide/running-PTCLM/ptclm-examples.rst
deleted file mode 100644
index 6801c5f3d8..0000000000
--- a/doc/source/users_guide/running-PTCLM/ptclm-examples.rst
+++ /dev/null
@@ -1,33 +0,0 @@
-.. include:: ../substitutions.rst
-
-.. _ptclm-examples:
-
-==============================
- Examples of using PTCLMmkdata
-==============================
-
-Now let's give a few more complex examples using some of the options we have discussed above.
-
-Now, let's demonstrate using a different group list, doing a spinup, running with Qian global forcing data, but using tower years to set the years to run over. This uses the options: sitegroupname, useQIAN, and QIANtower_years.
-
-Example: Running PTCLMmkdata without tower years
-------------------------------------------------
-::
-
- > cd $CTSMROOT/tools/PTCLM
- > ./PTCLMmkdata -s US-Ha1 -d $CSMDATA --sitegroupname AmeriFlux --donot_use_tower_yrs
- > cd ../../../../../US-Ha1_ICRUCLM45BGC_QIAN
- # Now build and run normally
- ```
-
-Finally, let's demonstrate using a generic machine (which then requires the scratchroot option), using the global grid for PFT and soil types, and setting the run length to two months.
-
-Example: Running PTCLMmkdata with global PFT and soil types dataset
--------------------------------------------------------------------
-::
-
- > cd $CTSMROOT/tools/PTCLM
- # Note, see the the Section called Converting AmeriFlux Data for use by PTCLMmkdata with permission information
- # to use the US-UMB data.
- > ./PTCLMmkdata -s US-UMB -d $CSMDATA --pftgrid --soilgrid
- > cd ../../../../../US-UMB_ICRUCLM45BGC
diff --git a/doc/source/users_guide/running-PTCLM/using-ptclm.rst b/doc/source/users_guide/running-PTCLM/using-ptclm.rst
deleted file mode 100644
index e7be79bee6..0000000000
--- a/doc/source/users_guide/running-PTCLM/using-ptclm.rst
+++ /dev/null
@@ -1,114 +0,0 @@
-.. include:: ../substitutions.rst
-
-.. _using-ptclm.rst:
-
-**************************
-Using PTCLMmkdata
-**************************
-
-There are two types of options to PTCLMmkdata: required and optional. The three required options are the three settings that MUST be specified for PTCLMmkdata to work at all. The other settings have default values that will default to something useful. Most options use a double dash "--" "longname" such as "--list", but the most common options also have a short-name with a single dash.
-
-The required options to PTCLMmkdata are: inputdata directory (-d) and site-name (-s). Inputdata directory is the directory where you have the CESM inputdata files. Finally site-name is the name of the site that you want to run for. Site-name is a Fluxnet site name from the list of sites you are running on (see the --sitegroupname for more information about the site lists).
-
-After PTCLMmkdata is run you can run **create_newcase** to setup a case to use the datasets created. It also creates a ``README.PTCLM`` in that directory that documents the commandline options to PTCLMmkdata that were used to create it.
-
-After "help" the "list" option is one of the most useful options for getting help on using PTCLMmkdata. This option gives you information about some of the other options to PTCLMmkdata. To get a list of the sites that can be used for PTCLMmkdata use the "--list" option as follows.
-::
-
- > cd $CTSMROOT/tools/PTCLM
- > ./PTCLMmkdata --list
-
-The output to the above command is as follows:
-::
-
- /bin/sh: line 1: PTCLMmkdata: command not found
-
-Steps in running PTCLMmkdata
-============================
-
-1. Build the CLM tools Next you need to make sure all the CLM FORTRAN tools are built.
- ::
-
- > cd $CTSMROOT/tools/PTCLM
- > ./buildtools
- > gmake clean
-
-2. Run PTCLMmkdata Next you actually run PTCLMmkdata which does the different things listed below:
-
- a. PTCLMmkdata names your output file directory based on your input
- ::
-
- [Prefix_]SiteCode
-
- Where:
- ``Prefix`` is from the caseidprefix option (or blank if not used).
-
- ``SiteCode`` is the site name you entered with the -s option.
-
- For example, the casename for the following will be:
- ::
-
- > cd scripts
- > ./PTCLMmkdata -s US-UMB -d $MYCSMDATA
-
- b. PTCLMmkdata creates datasets for you It will populate $MYCSMDATA with new datasets it creates using the CLM tools.
-
- c. If a transient compset and PTCLMmkdata finds a _dynpftdata.txt file If you are running a transient compset (such as the "I_1850-2000_CN" compset) AND you there is a file in the PTCLM_sitedata directory under the PTCLMmkdata directory called $SITE_dynpftdata.txt it will use this file for the land-use changes. Otherwise it will leave land-use constant, unless you use the pftgrid option so it uses the global dataset for landuse changes. See the Section called Dynamic Land-Use Change Files for use by PTCLMmkdata for more information on this. There is a sample transient dataset called US-Ha1_dynpftdata.txt. Transient compsets, are compsets that create transient land-use change and forcing conditions such as: 'I_1850-2000', 'I_1850-2000_CN', 'I_RCP8.5_CN', 'I_RCP6.0_CN', 'I_RCP4.5_CN', or 'I_RCP2.6_CN'.
-
- d. PTCLMmkdata creates a pft-physiology for you PTCLMmkdata will create a local copy of the pft-physiology specific for your site that you could then customize with changes specific for that site.
-
- e. PTCLMmkdata creates a README.PTCLM for you PTCLMmkdata will create a simple text file with the command line for it in a file called README.PTCLM in the case directory it creates for you.
-
-3. Run create_newcase pointing to the directory created
-
-4. Customize, setup, build and run case as normal You then customize your case as you would normally. See the Chapter 1 chapter for more information on doing this.
-
-PTCLMmkdata options
-=========================
-
-Next we discuss the setup and run-time options, dividing them up into setup, initial condition (IC), and run-time options.
-
-Configure options include:
-
-- --cesm_root=BASE_CESM
-- --sitegroupname=SITEGROUP
-- --donot_use_tower_yrs
-
-``--cesm_root``
- This option is for running PTCLMmkdata with a different root directory to CESM than the version PTCLMmkdata exists in. Normally you do NOT need to use this option.
-
-``--sitegroupname``
- In the PTCLMmkdata directory there is a subdirectory "PTCLM_sitedata" that contains files with the site, PFT and soil data information for groups of sites. These site groups are all separate ASCII files with the same prefix followed by a "_*data.txt" name. See :ref:`adding-ptclm-site-data` for more information on these files. By default we have provided three different valid group names:
-
-EXAMPLE
--------
-AmeriFlux
-
-Fluxnet-Canada
-
-The EXAMPLE is the group used by default and ONLY includes the US-UMB site as that is the only site we have data provided for. The other two site groups include the site information for all of both the AmeriFlux and Fluxnet-Canada sites. You can use the "sitegroupname" option to use one of the other lists, or you can create your own lists using the EXAMPLE file as an example. Your list of sites could be real world locations or could be theoretical "virtual" sites given to exercise CLM on differing biomes for example. Note, see :ref:`converting-ameriflux-for-ptclmmkdata` with permission information to use the US-UMB data.
-
-``--donot_use_tower_yrs``
- This option is used with the "useQIAN" option to set the years to cycle over for the Qian data. In this case Qian atmospheric forcing will be used, but the simulation will run over the same years that tower site is available for this site.
-
-**Run-time options include:**
-
-- --debug
-
-This option tells PTCLMmkdata to echo what it would do if it were run, but NOT actually run anything. So it will show you the dataset creation commands it would use. It does however, run **create_newcase**, but then it only displays the **xmlchange** commands and changes that it would do. Also note that if you give the "--rmold" option it won't delete the case directory beforehand. Primarily this is intended for debugging the operation of PTCLMmkdata.
-
-**The dataset generation options are:**
-
-- --pftgrid
-- --soilgrid
-
-The options that with a "grid" suffix all mean to create datasets using the global gridded information rather than using the site specific point data. By default the site specific point data is used. The "nopointdata" and "owritesrfaer" options have to do with file creation.
-
-Because supported single-point datasets already have the data created for them, you MUST use the "nopointdata" and "ndepgrid" options when you are using a supported single-point site. You must use "ndepgrid" even for a compset without CN. You also can NOT use the options: "soilgrid", "pftgrid", "aerdepgrid", or "owritesrfaer".
-
-``--pftgrid``
- This option says to use the PFT values provided on the global dataset rather than using the specific site based values from the PTCLM_sitedata/\*_pftdata.txt file when creating the surface dataset. This option must NOT be used when you you are using a site that is a supported single point dataset.
-
-``--soilgrid``
- This option says to use the soil values provided on the global dataset rather than using the specific site based values from the PTCLM_sitedata/\*_soildata.txt file when creating the surface dataset. This option must NOT be used when you you are using a site that is a supported single point dataset.
-
diff --git a/doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst b/doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst
index d3b14e6184..53cae1bdf4 100644
--- a/doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst
+++ b/doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst
@@ -8,10 +8,10 @@ Running a single point using global data - PTS_MODE
``PTS_MODE`` enables you to run the model using global datasets, but just picking a single point from those datasets and operating on it. It can be a very quick way to do fast simulations and get a quick turnaround.
-To setup a ``PTS_MODE`` simulation you use the "-pts_lat" and "-pts_lon" arguments to **create_newcase** to give the latitude and longitude of the point you want to simulate for (the code will pick the point on the global grid nearest to the point you give. Here's an example to setup a simulation for the nearest point at 2-degree resolution to Boulder Colorado.
+To set up a ``PTS_MODE`` simulation you use the ``-pts_lat`` and ``-pts_lon`` arguments to ``cime/scripts/create_newcase`` to give the latitude and longitude of the point you want to simulate (the code will pick the point on the global grid nearest to the point you give). Here's an example of setting up a simulation for the point nearest Boulder, Colorado at 2-degree resolution.
::
- > cd scripts
+ > cd cime/scripts
> ./create_newcase -case testPTS_MODE -res f19_g17_gl4 -compset I1850Clm50BgcCropCru -pts_lat 40.0 -pts_lon -105
> cd testPTS_MODE
diff --git a/doc/source/users_guide/running-single-points/running-single-point-configurations.rst b/doc/source/users_guide/running-single-points/running-single-point-configurations.rst
index 9d2b68456b..0e7f1262e2 100644
--- a/doc/source/users_guide/running-single-points/running-single-point-configurations.rst
+++ b/doc/source/users_guide/running-single-points/running-single-point-configurations.rst
@@ -45,7 +45,7 @@ Example: Use global forcings at a site without its own special forcings
This example uses the single-point site in Brazil.
::
- > cd scripts
+ > cd cime/scripts
> set SITE=1x1_brazil
> ./create_newcase -case testSPDATASET -res $SITE -compset I2000Clm50SpGs
> cd testSPDATASET
@@ -59,7 +59,7 @@ The urban Mexico City test site has its own atmosphere forcing data (see Sect. :
::
- > cd scripts
+ > cd cime/scripts
# Set a variable to the site you want to use (as it's used several times below)
> set SITE=1x1_mexicocityMEX
> ./create_newcase -case testSPDATASET -res $SITE -compset I1PtClm50SpGs
@@ -74,14 +74,17 @@ Supported single-point runs for sites with their own atmospheric forcing
Of the supported single-point datasets we have three that also have atmospheric forcing data that go with them: Mexico City (Mexico), Vancouver, (Canada, British Columbia), and ``urbanc_alpha`` (test data for an Urban inter-comparison project). Mexico city and Vancouver also have namelist options in the source code for them to work with modified urban data parameters that are particular to these locations. To turn on the atmospheric forcing for these datasets, you set the ``env_run.xml DATM_MODE`` variable to ``CLM1PT``, and then the atmospheric forcing datasets will be used for the point picked. If you use one of the compsets that has "I1Pt" in the name that will be set automatically.
-When running with datasets that have their own atmospheric forcing you need to be careful to run over the period that data is available. If you have at least one year of forcing it will cycle over the available data over and over again no matter how long of a simulation you run. However, if you have less than a years worth of data (or if the start date doesn't start at the beginning of the year, or the end date doesn't end at the end of the year) then you won't be able to run over anything but the data extent. In this case you will need to carefully set the ``RUN_STARTDATE``, ``START_TOD`` and ``STOP_N/STOP_OPTION`` variables for your case to run over the entire time extent of your data. For the supported data points, these values are in the XML database and you can use the **queryDefaultNamelist.pl** script to query the values and set them for your case (they are set for the three urban test cases: Mexicocity, Vancouver, and urbanc_alpha).
+.. todo::
+ Update the below, as ``queryDefaultNamelist.pl`` no longer exists.
+
+When running with datasets that have their own atmospheric forcing you need to be careful to run over the period that data is available. If you have at least one year of forcing it will cycle over the available data over and over again no matter how long a simulation you run. However, if you have less than a year's worth of data (or if the start date doesn't start at the beginning of the year, or the end date doesn't end at the end of the year) then you won't be able to run over anything but the data extent. In this case you will need to carefully set the ``RUN_STARTDATE``, ``START_TOD`` and ``STOP_N/STOP_OPTION`` variables for your case to run over the entire time extent of your data. For the supported data points, these values are in the XML database and you can use the ``queryDefaultNamelist.pl`` script to query the values and set them for your case (they are set for the three urban test cases: Mexicocity, Vancouver, and urbanc_alpha).
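+
+For example, a minimal sketch of pinning a case to the data extent (the dates and run length here are hypothetical -- match them to your forcing data):
+
+::
+
+ > ./xmlchange RUN_STARTDATE=1992-08-12
+ > ./xmlchange START_TOD=0
+ > ./xmlchange STOP_OPTION=ndays,STOP_N=330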
Example: Use site-specific atmospheric forcings
-----------------------------------------------
In this example, we show how to use the atmospheric forcings specific to the Vancouver, Canada point.
::
- > cd scripts
+ > cd cime/scripts
# Set a variable to the site you want to use (as it's used several times below)
> set SITE=1x1_vancouverCAN
@@ -121,7 +124,7 @@ Example: Using CLM_USRDAT_NAME to run a simulation using user datasets for a spe
-----------------------------------------------------------------------------------------------------------------------
::
- > cd scripts
+ > cd cime/scripts
> ./create_newcase -case my_userdataset_test -res CLM_USRDAT -compset I2000Clm50BgcCruGs
> cd my_userdataset_test/
> set GRIDNAME=13x12pt_f19_alaskaUSA
@@ -142,6 +145,9 @@ The first step is to create the domain and surface datasets using the process ou
Example: Creating a surface dataset for a single point
---------------------------------------------------------------------
+.. todo::
+ Update the below, as ``mksurfdata.pl`` no longer exists and domain files aren't needed with nuopc.
+
::
# set the GRIDNAME and creation date that will be used later
@@ -176,6 +182,13 @@ The next step is to create a case that points to the files you created above. We
Example: Setting up a case from the single-point surface dataset just created
--------------------------------------------------------------------------------------------
+
+.. todo::
+ Change this to provide instructions for a CTSM checkout instead of a CESM one.
+
+.. todo::
+ Update the below, as domain files aren't needed with nuopc.
+
::
# First setup an environment variable that points to the top of the CESM directory.
@@ -188,7 +201,7 @@ Example: Setting up a case from the single-point surface dataset just created
# naming convention (leave off the creation date)
> cp $CESMROOT/$CTSMROOT/tools/mksurfdata_esmf/surfdata_${GRIDNAME}_simyr1850_$CDATE.nc \
$MYCSMDATA/lnd/clm2/surfdata_map/surfdata_${GRIDNAME}_simyr1850.nc
- > cd $CESMROOT/scripts
+ > cd $CESMROOT/cime/scripts
> ./create_newcase -case my_usernldatasets_test -res CLM_USRDAT -compset I1850Clm50BgcCropCru \
-mach cheyenne_intel
> cd my_usernldatasets_test
@@ -199,4 +212,4 @@ Example: Setting up a case from the single-point surface dataset just created
> ./xmlchange CLM_USRDAT_NAME=$GRIDNAME
> ./case.setup
-.. note:: With this and previous versions of the model we recommended using ``CLM_USRDAT_NAME`` as a way to identify your own datasets without having to enter them into the XML database. This has the down-side that you can't include creation dates in your filenames, which means you can't keep track of different versions by date. It also means you HAVE to rename the files after you created them with **mksurfdata.pl**. Now, since ``user_nl`` files are supported for ALL model components, and the same domain files are read by both CLM and DATM and set using the envxml variables: ``ATM_DOMAIN_PATH``, ``ATM_DOMAIN_FILE``, ``LND_DOMAIN_PATH``, and ``LND_DOMAIN_FILE`` -- you can use this mechanism (``user_nl_clm`` and ``user_nl_datm`` and those envxml variables) to point to your datasets in any location. In the future we will deprecate ``CLM_USRDAT_NAME`` and recommend ``user_nl_clm`` and ``user_nl_datm`` and the ``DOMAIN`` envxml variables.
+.. note:: With this and previous versions of the model we recommended using ``CLM_USRDAT_NAME`` as a way to identify your own datasets without having to enter them into the XML database. This has the down-side that you can't include creation dates in your filenames, which means you can't keep track of different versions by date. It also means you HAVE to rename the files after you create them with ``mksurfdata.pl``. Now, since ``user_nl`` files are supported for ALL model components, and the same domain files are read by both CLM and DATM and set using the envxml variables ``ATM_DOMAIN_PATH``, ``ATM_DOMAIN_FILE``, ``LND_DOMAIN_PATH``, and ``LND_DOMAIN_FILE``, you can use this mechanism (``user_nl_clm`` and ``user_nl_datm`` and those envxml variables) to point to your datasets in any location. In the future we will deprecate ``CLM_USRDAT_NAME`` and recommend ``user_nl_clm``, ``user_nl_datm`` and the ``DOMAIN`` envxml variables.
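+
+A minimal sketch of that mechanism (all file names and paths below are hypothetical):
+
+::
+
+ # In user_nl_clm, point to your own surface dataset (hypothetical path):
+ fsurdat = '/path/to/mydata/surfdata_13x12pt_f19_alaskaUSA_simyr1850.nc'
+
+ # From the case directory, point the envxml DOMAIN variables at your domain file:
+ > ./xmlchange ATM_DOMAIN_PATH=/path/to/mydata,LND_DOMAIN_PATH=/path/to/mydata
+ > ./xmlchange ATM_DOMAIN_FILE=domain.lnd.13x12pt_f19_alaskaUSA.nc,LND_DOMAIN_FILE=domain.lnd.13x12pt_f19_alaskaUSA.nc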
diff --git a/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst b/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst
index 34a199ebe8..61e1f25de8 100644
--- a/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst
+++ b/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst
@@ -8,7 +8,7 @@ Single and Regional Grid Configurations
CLM allows you to set up and run cases with a single-point or a local region as well as global resolutions. This is often useful for running quick cases for testing, evaluating specific vegetation types, or land-units, or running with observed data for a specific site.
-There are three different ways to do this for normal-supported site
+There are two different ways to do this for a normal-supported site:
``PTS_MODE``
runs for a single point using global datasets.
@@ -16,11 +16,6 @@ There are three different ways to do this for normal-supported site
``CLM_USRDAT_NAME``
runs using your own datasets (single-point or regional).
-``PTCLMmkdata``
- easily setup simulations to run for tower sites..
-
-.. note:: ``PTS_MODE`` and ``PTCLMmkdata`` only works for a single point, while the other two options can also work for regional datasets as well.
-
.. _options-for-single-points:
=========================================
@@ -29,11 +24,9 @@ There are three different ways to do this for normal-supported site
Running for a *normal supported site* is a great solution, if one of the supported single-point/regional datasets, is your region of interest (see :ref:`running-single-point-datasets`). All the datasets are created for you, and you can easily select one and run, out of the box with it using a supported resolution from the top level of the CESM scripts. The problem is that there is a very limited set of supported datasets. You can also use this method for your own datasets, but you have to create the datasets, and add them to the XML database in scripts, CLM and to the DATM. This is worthwhile if you want to repeat many multiple cases for a given point or region.
-In general :ref:`pts_mode` is the quick and dirty method that gets you started without having to create datasets -- but has limitations. It's good for an initial attempt at seeing results for a point of interest, but since you can NOT restart with it, it's usage is limited. It is the quickest method as you can create a case for it directly from **create_newcase**. Although you can't restart, running a single point is very fast, and you can run for long simulation times even without restarts.
+In general :ref:`pts_mode` is the quick and dirty method that gets you started without having to create datasets -- but it has limitations. It's good for an initial attempt at seeing results for a point of interest, but since you can NOT restart with it, its usage is limited. It is the quickest method as you can create a case for it directly from ``cime/scripts/create_newcase``. Although you can't restart, running a single point is very fast, and you can run for long simulation times even without restarts.
Next, ``CLM_USRDAT_NAME`` is the best way to setup cases quickly where you have to create your own datasets (see :ref:`running-single-point-datasets`). With this method you don't have to change DATM or add files to the XML database -- but you have to follow a strict naming convention for files. However, once the files are named and in the proper location, you can easily setup new cases that use these datasets. This is good for treating all the required datasets as a "group" and for a particular model version. For advanced CLM developers who need to track dataset changes with different model versions you would be best off adding these datasets as supported datasets with the "normal supported datasets" method.
-Lastly *PTCLMmkdata* is a great way to easily create datasets, setup simulations and run simulations for tower sites. It takes advantage of both normal supported site functionality and CLM_USRDAT_NAME internally. A big advantage to it, is that it's one-stop shopping, it runs tools to create datasets, and runs **create_newcase** and sets the appropriate env variables for you. So you only have to learn how to run one tool, rather than work with many different ones. PTCLMmkdata is described in the next chapter, :ref:`running-PTCLM`.
-
-Finally, if you also have meteorology data that you want to force your CLM simulations with you'll need to setup cases as described in :ref:`creating-your-own-singlepoint-dataset`. You'll need to create CLM datasets either according to ``CLM_USRDAT_NAME``. You may also need to modify DATM to use your forcing data. And you'll need to change your forcing data to be in a format that DATM can use. :ref:`converting-ameriflux-for-ptclmmkdata` tells you how to use AmeriFlux data for atmospheric forcing.
+Finally, if you also have meteorology data that you want to force your CLM simulations with, you'll need to set up cases as described in :ref:`creating-your-own-singlepoint-dataset`. You'll need to create CLM datasets according to ``CLM_USRDAT_NAME``. You may also need to modify DATM to use your forcing data, and you'll need to change your forcing data to be in a format that DATM can use.
diff --git a/doc/source/users_guide/running-special-cases/Running-stand-alone-CLM-with-transient-historical-CO2-concentration.rst b/doc/source/users_guide/running-special-cases/Running-stand-alone-CLM-with-transient-historical-CO2-concentration.rst
deleted file mode 100644
index f93bcca2f0..0000000000
--- a/doc/source/users_guide/running-special-cases/Running-stand-alone-CLM-with-transient-historical-CO2-concentration.rst
+++ /dev/null
@@ -1,39 +0,0 @@
-.. include:: ../substitutions.rst
-
-.. _running-with-historical-co2-forcing:
-
-=====================================
- Running with historical CO2 forcing
-=====================================
-
-In this case you want to run a simulation with stand-alone CLM responding to changes in CO2 for a historical period. For this example, we will start with the "I_1850-2000_CN" compset that has transient: land-use, Nitrogen and Aerosol deposition already. You could also use another compset if you didn't want these other features to be transient. In order to get CO2 to be transient we need to add a new streams file and add it to the list of streams in the user_nl_datm file. You also need a NetCDF datafile that datm can read that gives the variation. You could supply your own file, but we have a standard file that is used by CAM for this and our example will make use of this file.
-
-.. note:: Most everything here has to do with changing datm rather than CLM to allow this to happen. As such the user that wishes to do this should first become more familiar with datm and read the `CESM Data Model User's Guide `_ especially as it pertains to the datm.
-
-.. warning:: This section documents the process for doing something that is non-standard. There may be errors with the documentation and process, and you may have to do some work before all of this works for you. If that is the case, we recommend that you do further research into understanding the process and the files, as well as understanding the datm and how it works. You may have to read documentation found in the code for datm as well as "csm_share".
-
-The datm has "streams" files that have rough XML-like syntax and specify the location and file to get data from, as well as information on the variable names and the data locations of the grid points. The datm expects specific variable names and the datm "maps" the expected variable names from the file to the names expected by datm. The file we are working with here is a file with a single-point, that covers the entire globe (so the vertices go from -90 to 90 degrees in latitude and 0 to 360 degrees in longitude). Since it's a single point it's a little easier to work with than datasets that may be at a given horizontal resolution. The datm also expects that variables will be in certain units, and only expects a limited number of variables so arbitrary fields can NOT be exchanged this way. However, the process would be similar for datasets that do contain more than one point.
-
-The three things that are needed: a domain file, a data file, and a streams text file. The domain file is a CF-compliant NetCDF file that has information on the grid points (latitudes and longitudes for cell-centers and vertices, mask, fraction, and areas). The datafile is a CF-compliant NetCDF file with the data that will be mapped. The streams text file is the XML-like file that tells datm how to find the files and how to map the variables datm knows about to the variable names on the NetCDF files. Note, that in our case the domain file and the data file are the same file. In other cases, the domain file may be separate from the data file.
-
-First we are going to create a case, and we will edit the ``user_nl_datm`` so that we add a CO2 data stream in. There is a streams text file available in ``$CTSMROOT/doc/UsersGuide/co2_streams.txt``, that includes file with a CO2 time-series from 1765 to 2007.
-
-Example: Transient Simulation with Historical CO2
---------------------------------------------------------------
-::
-
- > cd scripts
- > ./create_newcase -case DATM_CO2_TSERIES -res f19_g17_gl4 -compset IHistClm50BgcCrop
- > cd DATM_CO2_TSERIES
-
- # Historical CO2 will already be setup correctly for this compset
- # to check that look at the variables: CCSM_BGC,CLM_CO2_TYPE, and DATM_CO2_TSERIES
- > ./xmlquery CCSM_BGC,CLM_CO2_TYPE,DATM_CO2_TSERIES
- # Expect: CCSM_BGC=CO2A,CLM_CO2_TYPE=diagnostic,DATM_CO2_TSERIES=20tr
- > ./case.setup
-
- # Run preview namelist so we have the namelist in CaseDocs
- > ./preview_namelists
-
-Once, you've done that you can build and run your case normally.
-
diff --git a/doc/source/users_guide/running-special-cases/Running-the-prognostic-crop-model.rst b/doc/source/users_guide/running-special-cases/Running-the-prognostic-crop-model.rst
index 7e19af8678..56620b2fde 100644
--- a/doc/source/users_guide/running-special-cases/Running-the-prognostic-crop-model.rst
+++ b/doc/source/users_guide/running-special-cases/Running-the-prognostic-crop-model.rst
@@ -6,13 +6,13 @@
Running the prognostic crop model
===================================
-The prognostic crop model is setup to work with CLM4.0, CLM4.5 or |version| with either BGC or CN (with or without DV). In order to use the initial condition file, we need to set the ``RUN_TYPE`` to startup rather than ``hybrid`` since the compset for f19 sets up to use an initial condition file without crop active. To activate the crop model you can choose a compset that has "Crop" in the name such as "I1850Clm50BgcCropCru" or simply add "-crop" to ``CLM_BLDNML_OPTS`` (or for CLM4.0 add "-crop on" to ``CLM_CONFIG_OPTS``).
+The prognostic crop model is set up to work with CLM4.0, CLM4.5 or |version| with either BGC or CN (with or without DV). In order to use the initial condition file, we need to set the ``RUN_TYPE`` to startup rather than ``hybrid`` since the compset for f19 is set up to use an initial condition file without crop active. To activate the crop model you can choose a compset that has "Crop" in the name such as "I1850Clm50BgcCropCru" or simply add ``-crop`` to ``CLM_BLDNML_OPTS`` (or for CLM4.0 add ``-crop on`` to ``CLM_CONFIG_OPTS``).
Example: Crop Simulation
------------------------------------
::
- > cd scripts
+ > cd cime/scripts
> ./create_newcase -case CROP -res f19_g17_gl4 -compset I1850Clm50BgcCropCru
> cd CROP
diff --git a/doc/source/users_guide/running-special-cases/Running-with-MOAR-data-as-atmospheric-forcing-to-spinup-the-model.rst b/doc/source/users_guide/running-special-cases/Running-with-MOAR-data-as-atmospheric-forcing-to-spinup-the-model.rst
index dbe01c497c..769755937d 100644
--- a/doc/source/users_guide/running-special-cases/Running-with-MOAR-data-as-atmospheric-forcing-to-spinup-the-model.rst
+++ b/doc/source/users_guide/running-special-cases/Running-with-MOAR-data-as-atmospheric-forcing-to-spinup-the-model.rst
@@ -14,7 +14,7 @@ Example: Simulation with MOAR Data on cheyenne
-------------------------------------------------------------
::
- > cd scripts
+ > cd cime/scripts
> ./create_newcase -case MOARforce1850 -res f19_g17_gl4 -compset I1850Clm50BgcSpinup
> cd MOARforce1850
# The following sets the casename to point to for atm forcing (you could also use an editor)
diff --git a/doc/source/users_guide/running-special-cases/Running-with-irrigation.rst b/doc/source/users_guide/running-special-cases/Running-with-irrigation.rst
index f19b489731..5e9adb4b6a 100644
--- a/doc/source/users_guide/running-special-cases/Running-with-irrigation.rst
+++ b/doc/source/users_guide/running-special-cases/Running-with-irrigation.rst
@@ -6,14 +6,17 @@
Running with irrigation
===================================
-In CLM4.0 irrigation isn't an allowed option. In CLM4.5 irrigation can ONLY be used WITH crop. With CLM5.0 irrigation can be used whether crop is on or not -- **BUT** if crop is off, your surface datasets **HAVE** to have irrigation defined appropriately. Right now *ALL* surface datasets without crop enabled have irrigation hard-wired on. In order to create datasets with irrigation off, you'd need to make changes to ``mksurfdata_esmf`` in order to have all generic crops to be non-irrigated. To turn on irrigation in |version| we simply add "-irrig on" to ``CLM_BLDNML_OPTS``.
+.. todo::
+ Remove refs to pre-5.0 behavior?
+
+In CLM4.0 irrigation isn't an allowed option. In CLM4.5 irrigation can ONLY be used WITH crop. With CLM5.0 irrigation can be used whether crop is on or not -- **BUT** if crop is off, your surface datasets **HAVE** to have irrigation defined appropriately. Right now *ALL* surface datasets without crop enabled have irrigation hard-wired on. In order to create datasets with irrigation off, you'd need to make changes to ``mksurfdata_esmf`` to make all generic crops non-irrigated. To turn on irrigation in |version| we simply add ``-irrig on`` to ``CLM_BLDNML_OPTS``.
Example: Irrigation Simulation
------------------------------------------
::
# Note here we do a CLMSP simulation as that is what has been validated
- > cd scripts
+ > cd cime/scripts
> ./create_newcase -case IRRIG -res f19_g17_gl4 -compset I1850Clm50BgcCrop
> cd IRRIG
diff --git a/doc/source/users_guide/running-special-cases/Running-with-your-own-previous-simulation-as-atmospheric-forcing-to-spinup-the-model.rst b/doc/source/users_guide/running-special-cases/Running-with-your-own-previous-simulation-as-atmospheric-forcing-to-spinup-the-model.rst
index ff05836f6e..2a55cbed1e 100644
--- a/doc/source/users_guide/running-special-cases/Running-with-your-own-previous-simulation-as-atmospheric-forcing-to-spinup-the-model.rst
+++ b/doc/source/users_guide/running-special-cases/Running-with-your-own-previous-simulation-as-atmospheric-forcing-to-spinup-the-model.rst
@@ -14,7 +14,7 @@ Example: Fully Coupled Simulation to Create Data to Force Next Example Simulatio
----------------------------------------------------------------------------------------------
::
- > cd scripts
+ > cd cime/scripts
> ./create_newcase -case myB1850 -res f09_g17_gl4 -compset B1850
> cd myB1850
> ./case.setup
@@ -44,7 +44,7 @@ Example: Simulation Forced with Data from the Previous Simulation
------------------------------------------------------------------------------
::
- > cd scripts
+ > cd cime/scripts
> ./create_newcase -case frcwmyB1850 -res f09_g17_gl4 -compset I1850Clm50BgcSpinup
> cd frcWmyB1850
# The following sets the casename to point to for atm forcing (you could also use an editor)
diff --git a/doc/source/users_guide/running-special-cases/Spinning-up-the-biogeochemistry-BGC-spinup.rst b/doc/source/users_guide/running-special-cases/Spinning-up-the-biogeochemistry-BGC-spinup.rst
index cc266506a8..8376c280b1 100644
--- a/doc/source/users_guide/running-special-cases/Spinning-up-the-biogeochemistry-BGC-spinup.rst
+++ b/doc/source/users_guide/running-special-cases/Spinning-up-the-biogeochemistry-BGC-spinup.rst
@@ -6,7 +6,7 @@
Spinup of |version|-BGC-Crop
=============================
-To get the |version|-BGC model to a steady state, you first run it from arbitrary initial conditions using the "accelerated decomposition spinup" (-bgc_spinup on in CLM **configure**, see example below) mode for about 200 simulation years. :numref:`Figure BGC AD spinup plot for 1850 GSWP3` shows spinup behavior for an 1850 BGC accelerated decomposition (AD) case using GSWP3 atmospheric forcing. Generally, the criteria that less than 3% of the land surface be in total ecosystem carbon disequilibrium takes the longest to satisfy due to slow soil carbon (TOTSOMC) turnover times in the Arctic.
+To get the |version|-BGC model to a steady state, you first run it from arbitrary initial conditions using the "accelerated decomposition spinup" (``-bgc_spinup on`` in CLM ``configure``, see example below) mode for about 200 simulation years. :numref:`Figure BGC AD spinup plot for 1850 GSWP3` shows spinup behavior for an 1850 BGC accelerated decomposition (AD) case using GSWP3 atmospheric forcing. Generally, the criterion that less than 3% of the land surface be in total ecosystem carbon disequilibrium takes the longest to satisfy due to slow soil carbon (TOTSOMC) turnover times in the Arctic.
.. _Figure BGC AD spinup plot for 1850 GSWP3:
@@ -14,7 +14,7 @@ To get the |version|-BGC model to a steady state, you first run it from arbitrar
BGC AD spinup plot for a year 1850 case with GSWP3 atmospheric forcing. Variables examined are TOTECOSYSC (total ecosystem carbon), TOTSOMC (total soil organic matter carbon), TOTVEGC (total vegetation carbon), TLAI (total leaf area index), GPP (gross primary production) and TWS (total water storage). Generated using .../tools/contrib/SpinupStability.ncl.
-After this you branch from this mode in the "final spinup" (-bgc_spinup off in CLM **configure**, see example below), and run for several hundred simulation years. :numref:`Figure BGC pAD spinup plot for 1850 GSWP3` shows spinup behavior for an 1850 BGC post accelerated decomposition (pAD) case using GSWP3 atmospheric forcing. As before, the criteria that less than 3% of the land surface be in total ecosystem carbon disequilibrium takes the longest to satisfy. It can be difficult to meet this strict criteria in less than 1000 years and users may want to relax this criteria depending on their application.
+After this you branch from this mode in the "final spinup" (``-bgc_spinup off`` in CLM ``configure``, see example below), and run for several hundred simulation years. :numref:`Figure BGC pAD spinup plot for 1850 GSWP3` shows spinup behavior for an 1850 BGC post accelerated decomposition (pAD) case using GSWP3 atmospheric forcing. As before, the criterion that less than 3% of the land surface be in total ecosystem carbon disequilibrium takes the longest to satisfy. It can be difficult to meet this strict criterion in less than 1000 years and users may want to relax it depending on their application.
.. _Figure BGC pAD spinup plot for 1850 GSWP3:
@@ -41,13 +41,13 @@ You can also start from a default initial file that is setup as part of the sele
If you use the default initial file and you signficantly change model behavior or atmospheric forcing, and you are concerned about the carbon equilibrium (e.g., TOTECOSYSC, TOTSOMC, TOTVEGC), particularly at high latitudes, then we recommend you put the model back into AD mode to reach a new equilibrium. In this configuration, this will also automatically reseed "dead" plant functional types in the initial file with a bit of leaf carbon to give those plant functional types another chance to grow under the new atmospheric forcing or model conditions.
**1. |version| accelerated-decomposition (AD) spinup**
- For the first step of running 200+ years in "-bgc_spinup on" mode, you will setup a case, and then edit the values in env_build.xml and env_run.xml so that the right configuration is turned on and the simulation is setup to run for the required length of simulation time. So do the following:
+   For the first step of running 200+ years in ``-bgc_spinup on`` mode, you will set up a case, and then edit the values in env_build.xml and env_run.xml so that the right configuration is turned on and the simulation is set up to run for the required length of simulation time. So do the following:
-Example:: AD_SPINUP Simulation for |version|-BGC
+Example: AD_SPINUP Simulation for |version|-BGC
--------------------------------------------------------
::
- > cd scripts
+ > cd cime/scripts
> ./create_newcase -case BGC_spinup -res f19_g17_gl4 -compset I1850Clm50BgcCropCru
> cd BGC_spinup
# Change accelerated spinup mode
@@ -67,7 +67,7 @@ Example:: AD_SPINUP Simulation for |version|-BGC
Afterwards save the last restart file from this simulation to use in the next step.
**2. Final spinup for |version|-BGC**
- Next save the last restart file from this step and use it as the "finidat" file to use for one more spinup for at least 400+ years in normal mode. So do the following:
+ Next save the last restart file from this step and use it as the ``finidat`` file to use for one more spinup for at least 400+ years in normal mode. So do the following:
.. _eg-final-clmbgc-spinup:
@@ -75,7 +75,7 @@ Example: Final CLMBGC Spinup Simulation for |version|-BGC
------------------------------------------------------------------
::
- > cd scripts
+ > cd cime/scripts
> ./create_newcase -case BGC_finalspinup -res f19_g17_gl4 -compset I1850Clm50BgcCropCru
> cd BGC_finalspinup
# Now, Copy the last CLM restart file from the earlier case into your run directory
diff --git a/doc/source/users_guide/running-special-cases/index.rst b/doc/source/users_guide/running-special-cases/index.rst
index 9173825d04..31d5a3b148 100644
--- a/doc/source/users_guide/running-special-cases/index.rst
+++ b/doc/source/users_guide/running-special-cases/index.rst
@@ -24,5 +24,4 @@ Running Special Cases
Running-with-excess-ground-ice.rst
Running-with-MOAR-data-as-atmospheric-forcing-to-spinup-the-model.rst
Running-with-your-own-previous-simulation-as-atmospheric-forcing-to-spinup-the-model.rst
- Running-stand-alone-CLM-with-transient-historical-CO2-concentration.rst
Running-with-anomaly-forcing.rst
diff --git a/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst b/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst
index a1efac897e..7cd8ce9c9c 100644
--- a/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst
+++ b/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst
@@ -6,17 +6,17 @@
Customizing CLM's Configuration
********************************
-The section of the |cesmrelease| Quickstart `CESM Create a Case `_ gives instructions on creating a case. Also see a similar section in the CIME User's-Guide `CIME Create a case `_. What is of interest here is how to customize your use of CLM for the case that you created.
+The section of the |cesmrelease| Quickstart `CESM Create a Case `_ gives instructions on creating a case. Also see a similar section in the CIME User's Guide `CIME Create a case `_. What is of interest here is how to customize your use of CLM for the case that you created.
-For CLM when **preview_namelist**, **case.build**, or **case.run** are called there are two steps that take place:
+For CLM when ``preview_namelist``, ``case.build``, or ``case.run`` are called there are two steps that take place:
-1. The CLM "**configure**" script is called to setup the build-time configuration for CLM (see :ref:`more-info-clm-config-script`). The env variables for **configure** are locked after the **case.build** step. So the results of the CLM **configure** are locked after the build has taken place.
+1. The CLM ``configure`` script is called to set up the build-time configuration for CLM (see :ref:`more-info-clm-config-script`). The env variables for ``configure`` are locked after the ``case.build`` step. So the results of the CLM ``configure`` are locked after the build has taken place.
-2. The CLM "**build-namelist**" script is called to generate the run-time namelist for CLM (more information on **build-namelist** is given below in :ref:`def-nl-items-and-defaults`).
+2. The CLM ``build-namelist`` script is called to generate the run-time namelist for CLM (more information on ``build-namelist`` is given below in :ref:`def-nl-items-and-defaults`).
-When customizing your case at the **case.setup** step you are able to modify the process by effecting either one or both of these steps. The CLM "**configure**" and "**build-namelist**" scripts are both available in the "$CTSMROOT/bld" directory in the distribution. Both of these scripts have a "-help" option that is useful to examine to see what types of options you can give either of them.
+When customizing your case at the ``case.setup`` step you are able to modify the process by affecting either one or both of these steps. The CLM ``configure`` and ``build-namelist`` scripts are both available in the ``$CTSMROOT/bld`` directory in the distribution. Both of these scripts have a ``-help`` option that is useful for seeing what types of options you can give each of them.
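+
+For example, to list those options from a CTSM checkout:
+
+::
+
+ > cd $CTSMROOT/bld
+ > ./configure -help
+ > ./build-namelist -help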
-There are five different types of customization for the configuration that we will discuss: |version| in |cesmrelease| build-time options, |version| in |cesmrelease| run-time options, User Namelist, other noteworthy |cesmrelease| configuration items, the CLM **configure** script options, and the CLM **build-namelist** script options.
+There are six different types of customization for the configuration that we will discuss: |version| in |cesmrelease| build-time options, |version| in |cesmrelease| run-time options, User Namelist, other noteworthy |cesmrelease| configuration items, the CLM ``configure`` script options, and the CLM ``build-namelist`` script options.
Information on all of the CLM script, configuration, build and run items is found under ``$CTSMROOT/cime_config/config_component.xml``. See `CLM CASEROOT Variable Definitions `_.
@@ -38,12 +38,12 @@ Below we list each of the CESM configuration items that are specific to CLM. All
CLM_USRDAT_NAME
COMP_LND
-For the precedence of the different options to **build-namelist** see the section on precedence below.
+For the precedence of the different options to ``build-namelist`` see the section on precedence below.
The first item ``CLM_CONFIG_OPTS`` has to do with customizing the CLM build-time options for your case, the rest all have to do with generating the namelist.
CLM_CONFIG_OPTS
- The option ``CLM_CONFIG_OPTS`` is all about passing command line arguments to the CLM **configure** script. It is important to note that some compsets, may already put a value into the ``CLM_CONFIG_OPTS`` variable. You can still add more options to your ``CLM_CONFIG_OPTS`` but make sure you add to what is already there rather than replacing it. Hence, we recommend using the "-append" option to the xmlchange script. In :ref:`more-info-clm-config-script` below we will go into more details on options that can be customized in the CLM "**configure**" script. It's also important to note that the **$CTSMROOT/cime_config/buildnml** script may already invoke certain CLM **configure** options and as such those command line options are NOT going to be available to change at this step (nor would you want to change them). The options to CLM **configure** are given with the "-help" option which is given in :ref:`more-info-clm-config-script`... note:: ``CLM_CONFIG_OPTS`` is locked after the **case.build** script is run. If you want to change something in ``CLM_CONFIG_OPTS`` you'll need to clean the build and rerun **case.build**. The other env variables can be changed at run-time so are never locked.
+   The option ``CLM_CONFIG_OPTS`` is all about passing command line arguments to the CLM ``configure`` script. It is important to note that some compsets may already put a value into the ``CLM_CONFIG_OPTS`` variable. You can still add more options to your ``CLM_CONFIG_OPTS`` but make sure you add to what is already there rather than replacing it. Hence, we recommend using the ``-append`` option to the xmlchange script. In :ref:`more-info-clm-config-script` below we will go into more details on options that can be customized in the CLM ``configure`` script. It's also important to note that the ``$CTSMROOT/cime_config/buildnml`` script may already invoke certain CLM ``configure`` options and as such those command line options are NOT going to be available to change at this step (nor would you want to change them). The options to CLM ``configure`` are given with the ``-help`` option which is given in :ref:`more-info-clm-config-script`.
+
+   .. note:: ``CLM_CONFIG_OPTS`` is locked after the ``case.build`` script is run. If you want to change something in ``CLM_CONFIG_OPTS`` you'll need to clean the build and rerun ``case.build``. The other env variables can be changed at run-time so are never locked.
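+
+   For example, a minimal sketch of appending rather than replacing (the ``-crop on`` value here is just the CLM4.0-era setting mentioned elsewhere in this guide, standing in for whatever option you need)::
+
+      > ./xmlchange CLM_CONFIG_OPTS="-crop on" -append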
CLM_NML_USE_CASE
``CLM_NML_USE_CASE`` is used to set a particular set of conditions that set multiple namelist items, all centering around a particular usage of the model. (See :ref:`precedence-of-opts` for the precedence of this option relative to the others.) To list the valid options do the following:
@@ -101,7 +101,7 @@ CLM_NML_USE_CASE
stdurbpt_pd = Standard Urban Point Namelist Settings
CLM_BLDNML_OPTS
- The option CLM_BLDNML_OPTS is for passing options to the CLM "build-namelist" script. As with the CLM "configure" script the CLM $CTSMROOT/cime_config/buildnml may already invoke certain options and as such those options will NOT be available to be set here. The best way to see what options can be sent to the "build-namelist" script is to do
+   The option ``CLM_BLDNML_OPTS`` is for passing options to the CLM ``build-namelist`` script. As with the CLM ``configure`` script, the CLM ``$CTSMROOT/cime_config/buildnml`` may already invoke certain options and as such those options will NOT be available to be set here. The best way to see what options can be sent to the ``build-namelist`` script is to do
::
> cd $CTSMROOT/bld
@@ -256,43 +256,45 @@ CLM_BLDNML_OPTS
(i.e. CLM_NML_USE_CASE env_run variable)
6. values from the namelist defaults file.
-The **$CTSMROOT/cime_config/buildnml** script already sets the resolution and mask as well as the CLM **configure** file, and defines an input namelist and namelist input file, and the output namelist directory, and sets the start-type (from ``RUN_TYPE``), namelist options (from ``CLM_NAMELIST_OPTS``), co2_ppmv (from ``CCSM_CO2_PPMV``, co2_type (from ``CLM_CO2_TYPE``), lnd_frac (from ``LND_DOMAIN_PATH`` and ``LND_DOMAIN_FILE``), l_ncpl (from ``LND_NCPL``, glc_grid, glc_smb, glc_nec (from ``GLC_GRID``, ``GLC_SMB``, and ``GLC_NEC``), and "clm_usr_name" is set (to ``CLM_USRDAT_NAME >``when the grid is set to ``CLM_USRDAT_NAME``. Hence only the following different options can be set:
+The ``$CTSMROOT/cime_config/buildnml`` script already sets the resolution and mask as well as the CLM ``configure`` file, and defines an input namelist and namelist input file, and the output namelist directory, and sets the following: start-type (from ``RUN_TYPE``); namelist options (from ``CLM_NAMELIST_OPTS``); ``co2_ppmv`` (from ``CCSM_CO2_PPMV``); ``co2_type`` (from ``CLM_CO2_TYPE``); ``lnd_frac`` (from ``LND_DOMAIN_PATH`` and ``LND_DOMAIN_FILE``); ``l_ncpl`` (from ``LND_NCPL``); ``glc_grid``, ``glc_smb``, ``glc_nec`` (from ``GLC_GRID``, ``GLC_SMB``, and ``GLC_NEC``); and ``clm_usr_name`` (to ``CLM_USRDAT_NAME``). Hence only the following different options can be set:
-1.
--bgc_spinup
+#. ``-bgc_spinup``
-#. -chk_res
+#. ``-chk_res``
-#. -clm_demand
+#. ``-clm_demand``
-#. -drydep
+#. ``-drydep``
-#. -fire_emis
+#. ``-fire_emis``
-#. -ignore_ic_date
+#. ``-ignore_ic_date``
-#. -ignore_ic_year
+#. ``-ignore_ic_year``
-#. -irrig
+#. ``-irrig``
-#. -no-megan
+#. ``-no-megan``
-#. -note
+#. ``-note``
-#. -rcp
+#. ``-rcp``
-#. -sim_year
+#. ``-sim_year``
-#. -verbose
+#. ``-verbose``
-"-bgc_spinup" is an option only available for |version| for any configuration when CN is turned on (so either CLMCN or CLMBGC). It can be set to "on" or "off". If "on" the model will go into Accelerated Decomposition mode, while for "off" (the default) it will have standard decomposition rates. If you are starting up from initial condition files the model will check what mode the initial condition file is in and do the appropriate action on the first time-step to change the Carbon pools to the appropriate spinup setting. See :ref:`spinning-up-clm-bgc` for an example using this option.
+``-bgc_spinup`` is an option only available for |version| for any configuration when CN is turned on (so either CLMCN or CLMBGC). It can be set to "on" or "off". If "on" the model will go into Accelerated Decomposition mode, while for "off" (the default) it will have standard decomposition rates. If you are starting up from initial condition files the model will check what mode the initial condition file is in and take the appropriate action on the first time-step to change the carbon pools to the appropriate spinup setting. See :ref:`spinning-up-clm-bgc` for an example using this option.
-"-chk_res" ensures that the resolution chosen is supported by CLM. If the resolution is NOT supported it will cause the CLM **build-namelist** to abort when run. So when either **preview_namelist**, **case.build** or **case.run** is executed it will abort early. Since, the CESM scripts only support certain resolutions anyway, in general this option is NOT needed in the context of running CESM cases.
+.. todo::
+ Update the above.
-"-clm_demand" asks the **build-namelist** step to require that the list of variables entered be set. Typically, this is used to require that optional filenames be used and ensure they are set before continuing. For example, you may want to require that fpftdyn be set to get dynamically changing vegetation types. To do this you would do the following.
+``-chk_res`` ensures that the resolution chosen is supported by CLM. If the resolution is NOT supported it will cause the CLM ``build-namelist`` to abort when run. So when either ``preview_namelist``, ``case.build`` or ``case.run`` is executed it will abort early. Since the CESM scripts only support certain resolutions anyway, in general this option is NOT needed in the context of running CESM cases.
+
+``-clm_demand`` asks the ``build-namelist`` step to require that the list of variables entered be set. Typically, this is used to require that optional filenames be used and ensure they are set before continuing. For example, you may want to require that ``fpftdyn`` be set to get dynamically changing vegetation types. To do this you would do the following.
::
- > ./xmlchange CLM_BLDNML_OPTS="-clm_demand fpftdyn"``
+ > ./xmlchange CLM_BLDNML_OPTS="-clm_demand fpftdyn"
To see a list of valid variables that you could set do this:
::
@@ -302,25 +304,25 @@ To see a list of valid variables that you could set do this:
.. note:: Using a 20th-Century transient compset or the ``20thC_transient`` use-case using ``CLM_NML_USE_CASE`` would set this as well, but would also use dynamic nitrogen and aerosol deposition files, so using ``-clm_demand`` would be a way to get *just* dynamic vegetation types and NOT the other files as well.
-"-drydep" adds a dry-deposition namelist for testing to the driver. This is a driver namelist, but adding the option here has CLM **build-namelist** create the ``drv_flds_in`` file that the driver will copy over and use. Invoking this option does have an impact on performance even for I compsets and will slow the model down. It's also only useful when running with an active atmosphere model that makes use of this information.
+``-drydep`` adds a dry-deposition namelist for testing to the driver. This is a driver namelist, but adding the option here has CLM ``build-namelist`` create the ``drv_flds_in`` file that the driver will copy over and use. Invoking this option does have an impact on performance even for I compsets and will slow the model down. It's also only useful when running with an active atmosphere model that makes use of this information.
-"-ignore_ic_date" ignores the Initial Conditions (IC) date completely for finding initial condition files to startup from. Without this option or the "-ignore_ic_year" option below, the date of the file comes into play.
+``-ignore_ic_date`` ignores the Initial Conditions (IC) date completely for finding initial condition files to startup from. Without this option or the ``-ignore_ic_year`` option below, the date of the file comes into play.
-"-ignore_ic_year" ignores the Initial Conditions (IC) year for finding initial condition files to startup from. The date is used, but the year is ignored. Without this option or the "-ignore_ic_date" option below, the date and year of the file comes into play.
+``-ignore_ic_year`` ignores the Initial Conditions (IC) year for finding initial condition files to startup from. The date is used, but the year is ignored. Without this option or the ``-ignore_ic_date`` option above, the date and year of the file come into play.
-When "-irrig on" is used **build-namelist** will try to find surface datasets that have the irrigation model enabled (when running with Sattellitte Phenology). When running with the prognostic crop model on, "-irrig on" will turn irrigate crops on, while "-irrig off" will manage all crop areas as rain-fed without irrigation.
+When ``-irrig on`` is used ``build-namelist`` will try to find surface datasets that have the irrigation model enabled (when running with Satellite Phenology). When running with the prognostic crop model on, ``-irrig on`` will turn crop irrigation on, while ``-irrig off`` will manage all crop areas as rain-fed without irrigation.
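+
+For example, a sketch of turning crop irrigation on in an existing case (using ``-append`` so any options already in ``CLM_BLDNML_OPTS`` are kept):
+
+::
+
+ > ./xmlchange CLM_BLDNML_OPTS="-irrig on" -append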
-"no-megan" means do NOT add a MEGAN model Biogenic Volatile Organic Compounds (BVOC) testing namelist to the driver. This namelist is created by default, so normally this WILL be done. This is a driver namelist, so unless "no-megan" is specified the CLM **build-namelist** will create the ``drv_flds_in`` file that the driver will copy over and use (if you are running with CAM and CAM produces this file as well, it's file will have precedence).
+``-no-megan`` means do NOT add a MEGAN model Biogenic Volatile Organic Compounds (BVOC) testing namelist to the driver. This namelist is created by default, so normally this WILL be done. This is a driver namelist, so unless ``-no-megan`` is specified the CLM ``build-namelist`` will create the ``drv_flds_in`` file that the driver will copy over and use. (If you are running with CAM and CAM produces this file as well, its file will have precedence.)
-"-note" adds a note to the bottom of the namelist file, that gives the details of how **build-namelist** was called, giving the specific command-line options given to it.
+``-note`` adds a note to the bottom of the namelist file that gives the details of how ``build-namelist`` was called, listing the specific command-line options given to it.
-"-rcp" is used to set the representative concentration pathway for the future scenarios you want the data-sets to simulate conditions for, in the input datasets. To list the valid options do the following:
+``-rcp`` is used to set the representative concentration pathway for the future scenario you want the input datasets to simulate conditions for. To list the valid options do the following:
::
> cd $CTSMROOT/doc
> ../bld/build-namelist -rcp list
-"-sim_year" is used to set the simulation year you want the data-sets to simulate conditions for in the input datasets. The simulation "year" can also be a range of years in order to do simulations with changes in the dataset values as the simulation progresses. To list the valid options do the following:
+``-sim_year`` sets the simulation year you want the input datasets to simulate conditions for. The simulation year can also be a range of years in order to do simulations with changes in the dataset values as the simulation progresses. To list the valid options do the following:
::
> cd $CTSMROOT/doc
@@ -346,8 +348,11 @@ When "-irrig on" is used **build-namelist** will try to find surface datasets th
``CLM_FORCE_COLDSTART``
when set to on, *requires* that your simulation do a cold start from arbitrary initial conditions. If this is NOT set, it will use an initial condition file if it can find an appropriate one, and otherwise do a cold start. ``CLM_FORCE_COLDSTART`` is a good way to ensure that you are doing a cold start if that is what you want to do.
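+   For example, to force a cold start (a minimal sketch; run from your case directory):
+   ::
+
+      > ./xmlchange CLM_FORCE_COLDSTART=on
+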
+.. todo::
+ Update the below, as ``queryDefaultNamelist.pl`` no longer exists.
+
``CLM_USRDAT_NAME``
- Provides a way to enter your own datasets into the namelist. The files you create must be named with specific naming conventions outlined in :ref:`creating-your-own-singlepoint-dataset`. To see what the expected names of the files are, use the **queryDefaultNamelist.pl** to see what the names will need to be. For example if your ``CLM_USRDAT_NAME`` will be "1x1_boulderCO", with a "navy" land-mask, constant simulation year range, for 1850, the following will list what your filenames should be:
+ Provides a way to enter your own datasets into the namelist. The files you create must be named with specific naming conventions outlined in :ref:`creating-your-own-singlepoint-dataset`. To see what the expected file names are, use ``queryDefaultNamelist.pl``. For example if your ``CLM_USRDAT_NAME`` will be "1x1_boulderCO", with a "navy" land-mask, constant simulation year range, for 1850, the following will list what your filenames should be:
::
> cd $CTSMROOT/bld
@@ -356,13 +361,13 @@ When "-irrig on" is used **build-namelist** will try to find surface datasets th
An example of using ``CLM_USRDAT_NAME`` for a simulation is given in Example :numref:`creating-your-own-singlepoint-dataset`.
``CLM_CO2_TYPE``
- sets the type of input CO2 for either "constant", "diagnostic" or prognostic". If "constant" the value from ``CCSM_CO2_PPMV`` will be used. If "diagnostic" or "prognostic" the values MUST be sent from the atmosphere model. See :ref:`running-with-historical-co2-forcing` for more information on how to send CO2 from the data atmosphere model.
+ sets the type of input CO2 as either "constant", "diagnostic" or "prognostic". If "constant" the value from ``CCSM_CO2_PPMV`` will be used. If "diagnostic" or "prognostic" the values MUST be sent from the atmosphere model.
===============
User Namelist
===============
-``CLM_NAMELIST_OPTS`` as described above allows you to set any extra namelist items you would like to appear in your namelist. However, it only allows you a single line to enter namelist items, and strings must be quoted with ' which is a bit awkward. If you have a long list of namelist items you want to set (such as a long list of history fields) a convenient way to do it is to add to the ``user_nl_clm`` that is created after the **case.setup** command runs. The file needs to be in valid FORTRAN namelist format (with the exception that the namelist name &namelist and the end of namelist marker "/" are excluded". The **preview_namelist** or **case.run** step will abort if there are syntax errors. All the variable names must be valid and the values must be valid for the datatype and any restrictions for valid values for that variable. Here's an example ``user_nl_clm`` namelist that sets a bunch of history file related items, to create output history files monthly, daily, every six and 1 hours.
+``CLM_NAMELIST_OPTS`` as described above allows you to set any extra namelist items you would like to appear in your namelist. However, it only allows you a single line to enter namelist items, and strings must be quoted with ``'``, which is a bit awkward. If you have a long list of namelist items you want to set (such as a long list of history fields) a convenient way to do it is to add to the ``user_nl_clm`` that is created after the ``case.setup`` command runs. The file needs to be in valid FORTRAN namelist format (with the exception that the namelist name ``&namelist`` and the end of namelist marker ``/`` are excluded). The ``preview_namelist`` or ``case.run`` step will abort if there are syntax errors. All the variable names must be valid and the values must be valid for the datatype and any restrictions for valid values for that variable. Here's an example ``user_nl_clm`` namelist that sets a bunch of history file related items, to create output history files monthly, daily, every six hours, and every hour.
----------------------------------
Example: user_nl_clm namelist file
@@ -410,9 +415,9 @@ Example: user_nl_clm namelist file
hist_mfilt = 1, 30, 28, 24
hist_nhtfrq = 0, -24, -6, -1
-**Note:** The comments at the top are some guidance given in the default user_nl_clm and just give some guidance on how to set variables and use the file.
+**Note:** The comments at the top of the default ``user_nl_clm`` give guidance on how to set variables and use the file.
-**Note:** You do NOT need to specify the namelist group that the variables are in because the CLM **build-namelist** knows the namelist that specific variable names belong to, and it puts them there.
+**Note:** You do NOT need to specify the namelist group that the variables are in because the CLM ``build-namelist`` knows the namelist that specific variable names belong to, and it puts them there.
Obviously, all of this would be difficult to put in the CLM_NAMELIST_OPTS variable, especially having to put ' around all the character strings. For more information on the namelist variables being set here and what they mean, see the section on CLM namelists below, as well as the namelist definition that gives details on each variable.
@@ -422,17 +427,17 @@ Obviously, all of this would be difficult to put in the CLM_NAMELIST_OPTS variab
Precedence of Options
---------------------
-Note: The precedence for setting the values of namelist variables with the different env_build.xml, env_run.xml options is (highest to lowest):
+Note: The precedence for setting the values of namelist variables with the different ``env_build.xml``, ``env_run.xml`` options is (highest to lowest):
-1. Namelist values set by specific command-line options, like, -d, -sim_year (i.e. CLM_BLDNML_OPTS env_build.xml variable)
+1. Namelist values set by specific command-line options, like, ``-d``, ``-sim_year`` (i.e. ``CLM_BLDNML_OPTS`` ``env_build.xml`` variable)
-#. Values set on the command-line using the -namelist option, (i.e. CLM_NAMELIST_OPTS env_run.xml variable)
+#. Values set on the command-line using the ``-namelist`` option, (i.e. ``CLM_NAMELIST_OPTS`` ``env_run.xml`` variable)
-#. Values read from the file specified by -infile, (i.e. user_nl_clm file)
+#. Values read from the file specified by ``-infile``, (i.e. ``user_nl_clm`` file)
-#. Datasets from the -clm_usr_name option, (i.e. CLM_USRDAT_NAME env_run.xml variable)
+#. Datasets from the ``-clm_usr_name`` option, (i.e. ``CLM_USRDAT_NAME`` ``env_run.xml`` variable)
-#. Values set from a use-case scenario, e.g., -use_case (i.e. CLM_NML_USE_CASE env_run.xml variable)
+#. Values set from a use-case scenario, e.g., ``-use_case`` (i.e. ``CLM_NML_USE_CASE`` ``env_run.xml`` variable)
#. Values from the namelist defaults file.
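+
+For example, a value set through ``CLM_NAMELIST_OPTS`` (a hypothetical sketch) takes precedence over the same variable set in ``user_nl_clm``, because the xml option is higher in the list above:
+::
+
+   > ./xmlchange CLM_NAMELIST_OPTS="hist_nhtfrq=-24"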
@@ -451,7 +456,7 @@ Especially with CLMBGC and CLMCN starting from initial conditions is very import
- :ref:`providing-finidat-in-usernlclm`
- :ref:`adding-finidat-to-xml`
- **Note:** Your initial condition file MUST agree with the surface dataset you are using to run the simulation. If the two files do NOT agree you will get a run-time about a mis-match in PFT weights, or in the number of PFT's or columns. To get around this you'll need to add the "use_init_interp=T" namelist flag in your namelist so that the initial conditions will be interpolated on startup.**
+   **Note:** Your initial condition file MUST agree with the surface dataset you are using to run the simulation. If the two files do NOT agree you will get a run-time error about a mis-match in PFT weights, or in the number of PFTs or columns. To get around this you'll need to add the ``use_init_interp=T`` namelist flag in your namelist so that the initial conditions will be interpolated on startup.
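+
+   For example, to turn on this interpolation, add the following line to ``user_nl_clm`` (a minimal sketch):
+   ::
+
+      use_init_interp = .true.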
.. _doing-a-hybrid-sim-for-init-conds:
@@ -459,11 +464,11 @@ Especially with CLMBGC and CLMCN starting from initial conditions is very import
Doing a hybrid simulation to provide initial conditions
-------------------------------------------------------
-The first option is to setup a hybrid simulation and give a ``RUN_REFCASE`` and ``RUN_REFDATE`` to specify the reference case simulation name to use. When you setup coupled cases (assuming a CESM checkout), at the standard resolution of "f09" it will already do this for you. For example, if you run an "B1850" compset at "f09_g17_gl4" resolution the following settings will already be done for you.
+The first option is to set up a hybrid simulation and give a ``RUN_REFCASE`` and ``RUN_REFDATE`` to specify the reference case simulation name to use. When you set up coupled cases (assuming a CESM checkout), at the standard resolution of "f09" it will already do this for you. For example, if you run a "B1850" compset at "f09_g17_gl4" resolution the following settings will already be done for you.
``./xmlchange RUN_TYPE=hybrid,RUN_REFCASE=b.e20.B1850.f09_g17.pi_control.all.297,RUN_REFDATE=0130-01-01,GET_REFCASE=TRUE``
-Setting the ``GET_REFCASE`` option to ``TRUE means`` it will copy the files from the RUN_REFDIR usually under: ``$DIN_LOC_ROOT/cesm2_init/$RUN_REFCASE/$RUN_REFDATE`` directory. Note, that the ``RUN_REFCASE`` and ``RUN_REFDATE`` variables are expanded to get the directory name above. If you do NOT set ``GET_REFCASE`` to ``TRUE`` then you will need to have placed the file in your run directory yourself. In either case, the file is expected to be named: ``$RUN_REFCASE.clm2.r.$RUN_REFDATE-00000.nc`` with the variables expanded of course.
+Setting the ``GET_REFCASE`` option to ``TRUE`` means it will copy the files from the ``RUN_REFDIR``, usually under the ``$DIN_LOC_ROOT/cesm2_init/$RUN_REFCASE/$RUN_REFDATE`` directory. Note that the ``RUN_REFCASE`` and ``RUN_REFDATE`` variables are expanded to get the directory name above. If you do NOT set ``GET_REFCASE`` to ``TRUE`` then you will need to have placed the file in your run directory yourself. In either case, the file is expected to be named: ``$RUN_REFCASE.clm2.r.$RUN_REFDATE-00000.nc`` with the variables expanded of course.
.. _doing-a-branch-sim-for-init-conds:
@@ -498,7 +503,7 @@ Like other datasets, if you want to use a given initial condition file to be use
Other noteworthy configuration items
------------------------------------
-For running "I" cases there are several other noteworthy configuration items that you may want to work with. Most of these involve settings for the DATM, but one ``CCSM_CO2_PPMV`` applies to all models. The list of DATM settings is `here `_. If you are running an B, E, or F case that doesn't use the DATM obviously the DATM_* settings will not be used. All of the settings below are in your ``env_build.xml`` and ``env_run.xml`` files
+For running "I" cases there are several other noteworthy configuration items that you may want to work with. Most of these involve settings for the DATM, but one, ``CCSM_CO2_PPMV``, applies to all models. The list of DATM settings is `here `_. If you are running a B, E, or F case that doesn't use the DATM, obviously the ``DATM_*`` settings will not be used. All of the settings below are in your ``env_build.xml`` and ``env_run.xml`` files.
::
CCSM_CO2_PPMV
@@ -514,7 +519,7 @@ For running "I" cases there are several other noteworthy configuration items tha
DATM_CPL_YR_END
``CCSM_CO2_PPMV``
- Sets the mixing ratio of CO2 in parts per million by volume for ALL CESM components to use. Note that most compsets already set this value to something reasonable. Also note that some compsets may tell the atmosphere model to override this value with either historic or ramped values. If the CCSM_BGC variable is set to something other than "none" the atmosphere model will determine CO2, and CLM will listen and use what the atmosphere sends it. On the CLM side the namelist item co2_type tells CLM to use the value sent from the atmosphere rather than a value set on it's own namelist.
+ Sets the mixing ratio of CO2 in parts per million by volume for ALL CESM components to use. Note that most compsets already set this value to something reasonable. Also note that some compsets may tell the atmosphere model to override this value with either historic or ramped values. If the ``CCSM_BGC`` variable is set to something other than "none" the atmosphere model will determine CO2, and CLM will listen and use what the atmosphere sends it. On the CLM side the namelist item ``co2_type`` tells CLM to use the value sent from the atmosphere rather than a value set on its own namelist.
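+
+ For example, to set a constant CO2 concentration for all components (379.0 is just an illustrative value):
+ ::
+
+    > ./xmlchange CCSM_CO2_PPMV=379.0
+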
``DATM_MODE``
Sets the mode that the DATM model should run in this determines how data is handled as well as what the source of the data will be. Many of the modes are setup specifically to be used for ocean and/or sea-ice modeling. The modes that are designed for use by CLM are (CLM_QIAN, CLMCRUNCEP, CLMCRUNCEPv7, CLMGSWP3v1 and CLM1PT):
@@ -540,7 +545,7 @@ For running "I" cases there are several other noteworthy configuration items tha
The standard mode for CLM4.0 of using global atmospheric data that was developed by Qian et. al. for CLM using NCEP data from 1948 to 2004. See :ref:`clmqian-and-its-datm` for more information.
``CLM1PT``
- This is for the special cases where we have single-point tower data for particular sites. Right now we only have data for three urban locations: MexicoCity Mexico, Vancouver Canada, and the urban-c alpha site. And we have data for the US-UMB AmeriFlux tower site for University of Michigan Biological Station. See :ref:`clm1pt-and-its-datm` for more information.
+ This is for the special cases where we have single-point tower data for particular sites. Right now we only have data for three urban locations: Mexico City, Mexico; Vancouver, Canada; and the urban-c alpha site. We also have data for the US-UMB AmeriFlux tower site at the University of Michigan Biological Station. See :ref:`clm1pt-and-its-datm` for more information.
``CPLHISTForcing``
This is for running with atmospheric forcing from a previous CESM simulation. See :ref:`cplhistforcing` for more information.
@@ -591,7 +596,7 @@ Downloading DATM Forcing Data
In Chapter One of the `CESM User's Guide `_ there is a section on "Downloading input data". The normal process of setting up cases will use the "scripts/ccsm_utils/Tools/check_input_data" script to retrieve data from the CESM subversion inputdata repository. This is true for the standard `CLM_QIAN` forcing as well.
-The `CLMCRUNCEP` data is uploaded into the subversion inputdata repository as well -- but as it is 1.1 Terabytes of data downloading it is problematic (*IT WILL TAKE SEVERAL DAYS TO DOWNLOAD THE ENTIRE DATASET USING SUBVERSION*). Because of it's size you may also need to download it onto a separate disk space. We have done that on cheyenne for example where it resides in ``$ENV{CESMROOT}/lmwg`` while the rest of the input data resides in ``$ENV{CESMDATAROOT}/inputdata``. The data is also already available on: janus, franklin, and hopper. If you download the data, we recommend that you break your download into several chunks, by setting up a case and setting the year range for ``DATM_CPL_YR_START`` and ``DATM_CPL_YR_END`` in say 20 year sections over 1901 to 2010, and then use **check_input_data** to export the data.
+The ``CLMCRUNCEP`` data is uploaded into the subversion inputdata repository as well -- but as it is 1.1 Terabytes of data, downloading it is problematic (*IT WILL TAKE SEVERAL DAYS TO DOWNLOAD THE ENTIRE DATASET USING SUBVERSION*). Because of its size you may also need to download it onto a separate disk space. We have done that on cheyenne for example where it resides in ``$ENV{CESMROOT}/lmwg`` while the rest of the input data resides in ``$ENV{CESMDATAROOT}/inputdata``. The data is also already available on: janus, franklin, and hopper. If you download the data, we recommend that you break your download into several chunks, by setting up a case and setting the year range for ``DATM_CPL_YR_START`` and ``DATM_CPL_YR_END`` in, say, 20-year sections over 1901 to 2010, and then use ``check_input_data`` to export the data.
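+
+A sketch of one such chunk (assuming your case is already set up; the exact ``check_input_data`` options vary by CIME version):
+::
+
+   > ./xmlchange DATM_CPL_YR_START=1901,DATM_CPL_YR_END=1920
+   > ./check_input_data --download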
The ``CPLHISTForcing`` DATM forcing data is unique -- because it is large compared to the rest of the input data, and we only have a disk copy on cheyenne. The DATM assumes the path for the previous NCAR machine cheyenne of ``/glade/p/cesm/shared_outputdata/cases/ccsm4/$DATM_CPLHIST_CASE`` for the data. So you will need to change this path in order to run on any other machine. You can download the data itself from NCAR HPSS from ``/CCSM/csm/$DATM_CPLHIST_CASE``.
@@ -599,12 +604,12 @@ The ``CPLHISTForcing`` DATM forcing data is unique -- because it is large compar
Customizing via the build script files
--------------------------------------
-The final thing that the user may wish to do before **case.setup** is run is to edit the build script files which determine the configuration and namelist. The variables in ``env_build.xml`` or ``env_run.xml`` typically mean you will NOT have to edit build script files. But, there are rare instances where it is useful to do so. The build script files are copied to your case directory and are available under Buildconf. The list of build script files you might wish to edit are:
+The final thing that the user may wish to do before ``case.setup`` is run is to edit the build script files, which determine the configuration and namelist. The variables in ``env_build.xml`` or ``env_run.xml`` typically mean you will NOT have to edit build script files. But, there are rare instances where it is useful to do so. The build script files are copied to your case directory and are available under ``Buildconf``. The list of build script files you might wish to edit is:
-**clm.buildexe.csh**
-**$CTSMROOT/cime_config/buildnml**
-**datm.buildexe.csh**
-**datm.buildnml.csh**
+``clm.buildexe.csh``
+``$CTSMROOT/cime_config/buildnml``
+``datm.buildexe.csh``
+``datm.buildnml.csh``
.. _more-info-clm-config-script:
@@ -617,7 +622,7 @@ The CLM ``configure`` script defines the details of a clm configuration and summ
Help on CLM configure
---------------------
-Coupling this with looking at the options to CLM **configure** with "-help" as below will enable you to understand how to set the different options.
+Coupling this with looking at the options to CLM ``configure`` with ``-help`` as below will enable you to understand how to set the different options.
::
> cd $CTSMROOT/bld
@@ -694,4 +699,4 @@ The output to the above command is as follows:
no-nitrif Turn the Nitrification/denitrification off
[no-vert,no-cent,no-nitrif,no-vert:no-cent]
-We've given details on how to use the options in env_build.xml and env_run.xml to interact with the CLM "configure" and "build-namelist" scripts, as well as giving a good understanding of how these scripts work and the options to them. In the next section we give further details on the CLM namelist. You could customize the namelist for these options after "case.setup" is run.
+We've given details on how to use the options in ``env_build.xml`` and ``env_run.xml`` to interact with the CLM ``configure`` and ``build-namelist`` scripts, as well as giving a good understanding of how these scripts work and the options to them. In the next section we give further details on the CLM namelist. You could customize the namelist for these options after ``case.setup`` is run.
diff --git a/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-namelist.rst b/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-namelist.rst
index ff76a841c9..2d6f58f317 100644
--- a/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-namelist.rst
+++ b/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-namelist.rst
@@ -6,7 +6,7 @@
Customizing CLM's namelist
============================
-Once a case has run **case.setup**, we can then customize the case further, by editing the run-time namelist for CLM. First let's list the definition of each namelist item and their valid values, and then we'll list the default values for them. Next for some of the most used or tricky namelist items we'll give examples of their use, and give you example namelists that highlight these features.
+Once ``case.setup`` has been run for a case, we can customize it further by editing the run-time namelist for CLM. First let's list the definition of each namelist item and their valid values, and then we'll list the default values for them. Next for some of the most used or tricky namelist items we'll give examples of their use, and give you example namelists that highlight these features.
In the following, various examples of namelists are provided that feature the use of different namelist options to customize a case for particular uses. Most the examples revolve around how to customize the output history fields. This should give you a good basis for setting up your own CLM namelist.
@@ -138,14 +138,14 @@ Example 1-5. Example user_nl_clm namelist removing all history fields
Various ways to change history output averaging flags
-----------------------------------------------------
-There are two ways to change the averaging of output history fields. The first is using ``hist_avgflag_pertape`` which gives a default value for each history stream, the second is when you add fields using ``hist_fincl*``, you add an averaging flag to the end of the field name after a colon (for example 'TSOI:X', would output the maximum of TSOI). The types of averaging that can be done are:
+There are two ways to change the averaging of output history fields. The first is using ``hist_avgflag_pertape``, which gives a default value for each history stream; the second is, when you add fields using ``hist_fincl*``, to add an averaging flag to the end of the field name after a colon (for example ``TSOI:X`` would output the maximum of ``TSOI``). The types of averaging that can be done are:
-- *A* Average, over the output interval.
-- *I* Instantaneous, output the value at the output interval.
-- *X* Maximum, over the output interval.
-- *M* Minimum, over the output interval.
+- ``A`` Average, over the output interval.
+- ``I`` Instantaneous, output the value at the output interval.
+- ``X`` Maximum, over the output interval.
+- ``M`` Minimum, over the output interval.
-The default averaging depends on the specific fields, but for most fields is an average. A sample user namelist ``user_nl_clm`` making the monthly output fields all averages (except TSOI for the first two streams and FIRE for the 5th stream), and adding auxiliary file streams for instantaneous (6-hourly), maximum (daily), minimum (daily), and average (daily). For some of the fields we diverge from the per-tape value given and customize to some different type of optimization.
+The default averaging depends on the specific fields, but for most fields is an average. Below is a sample user namelist ``user_nl_clm`` making the monthly output fields all averages (except ``TSOI`` for the first two streams and ``FIRE`` for the 5th stream), and adding auxiliary file streams for instantaneous (6-hourly), maximum (daily), minimum (daily), and average (daily). For some of the fields we diverge from the per-tape value given and customize to some different type of averaging.
Example: user_nl_clm namelist with various ways to average history fields
-------------------------------------------------------------------------------------
@@ -165,7 +165,7 @@ Example: user_nl_clm namelist with various ways to average history fields
hist_avgflag_pertape = 'A', 'I', 'X', 'M', 'A'
hist_nhtfrq = 0, -6, -24, -24, -24
-In the example we put the same list of fields on each of the tapes: soil-temperature, ground temperature, vegetation temperature, emitted longwave radiation, reflected solar radiation, sensible heat, total latent-heat, and total water storage. We also modify the soil-temperature for the primary and secondary auxiliary tapes by outputting them for a maximum instead of the prescribed per-tape of average and instantaneous respectively. For the tertiary auxiliary tape we output ground temperature instantaneous instead of as a maximum, and for the fourth auxiliary tape we output vegetation temperature instantaneous instead of as a minimum. Finally, for the fifth auxiliary tapes we output ``FIRE`` instantaneously instead of as an average.
+In the example we put the same list of fields on each of the tapes: soil temperature, ground temperature, vegetation temperature, emitted longwave radiation, reflected solar radiation, sensible heat, total latent-heat, and total water storage. We also modify the soil temperature for the primary and secondary auxiliary tapes by outputting it as a maximum instead of the prescribed per-tape flags of average and instantaneous, respectively. For the tertiary auxiliary tape we output ground temperature instantaneous instead of as a maximum, and for the fourth auxiliary tape we output vegetation temperature instantaneous instead of as a minimum. Finally, for the fifth auxiliary tape we output ``FIRE`` instantaneously instead of as an average.
.. note:: We also use ``hist_empty_htapes`` as in the previous example, so we can list ONLY the fields that we want on the primary history tapes.
@@ -189,8 +189,8 @@ Example: user_nl_clm namelist outputting some files in 1D Vector format
hist_type2d_pertape = ' ', 'GRID', 'COLS', ' '
hist_nhtfrq = 0, -24, -24, -24
-.. warning:: LAND and COLS are also options to the pertape averaging, but currently there is a bug with them and they fail to work.
+.. warning:: ``LAND`` and ``COLS`` are also options to the pertape averaging, but currently there is a bug with them and they fail to work.
-.. note:: Technically the default for hist_nhtfrq is for primary files output monthly and the other auxiliary tapes for daily, so we don't actually have to include hist_nhtfrq, we could use the default for it. Here we specify it for clarity.
+.. note:: Technically the default for ``hist_nhtfrq`` is for primary files output monthly and the other auxiliary tapes for daily, so we don't actually have to include ``hist_nhtfrq``, we could use the default for it. Here we specify it for clarity.
-Visualizing global 1D vector files will take effort. You'll probably want to do some post-processing and possibly just extract out single points of interest to see what is going on. Since, the output is a 1D vector, of only land-points traditional plots won't be helpful. The number of points per grid-cell will also vary for anything, but grid-cell averaging. You'll need to use the output fields pfts1d_ixy, and pfts1d_jxy, to get the mapping of the fields to the global 2D array. pfts1d_itype_veg gives you the PFT number for each PFT. Most likely you'll want to do this analysis in a data processing tool (such as NCL, Matlab, Mathmatica, IDL, etcetera that is able to read and process NetCDF data files).
+Visualizing global 1D vector files will take effort. You'll probably want to do some post-processing and possibly just extract out single points of interest to see what is going on. Since the output is a 1D vector of only land points, traditional plots won't be helpful. The number of points per grid-cell will also vary for anything but grid-cell averaging. You'll need to use the output fields ``pfts1d_ixy`` and ``pfts1d_jxy`` to get the mapping of the fields to the global 2D array. ``pfts1d_itype_veg`` gives you the PFT number for each PFT. Most likely you'll want to do this analysis in a data processing tool (such as NCL, Matlab, Mathematica, IDL, etc. that is able to read and process NetCDF data files).
diff --git a/doc/source/users_guide/testing/testing.rst b/doc/source/users_guide/testing/testing.rst
index 69ca1f7263..a9b0be0462 100644
--- a/doc/source/users_guide/testing/testing.rst
+++ b/doc/source/users_guide/testing/testing.rst
@@ -6,12 +6,15 @@
Testing
*******
-Technically, you could use the customization we gave in :ref:`customizing_section` to test various configuration and namelist options for CLM. Sometimes, it's also useful to have automated tests though to test that restarts give exactly the same results as without a restart. It's also useful to have automated tests to run over a wide variety of configurations, resolutions, and namelist options. To do that we have several different types of scripts set up to make running comprehensive testing of CLM easy. There are two types of testing scripts for CLM. The first are the CESM test scripts, which utilize the **create_newcase** scripts that we shown how to use in this User's Guide. The second are a set of stand-alone scripts that use the CLM **configure** and **build-namelist** scripts to build and test the model as well as testing the CLM tools as well. Below we will go into further details of how to use both methods.
+Technically, you could use the customization we gave in :ref:`customizing_section` to test various configuration and namelist options for CLM. However, it's also useful to have automated tests to check that restarts give exactly the same results as runs without a restart, and to run over a wide variety of configurations, resolutions, and namelist options. To do that we have several different types of scripts set up to make running comprehensive testing of CLM easy. There are two types of testing scripts for CLM. The first are the CESM test scripts, which utilize the ``cime/scripts/create_newcase`` scripts that we have shown how to use in this User's Guide. The second are a set of stand-alone scripts that use the CLM ``configure`` and ``bld/build-namelist`` scripts to build and test the model, as well as to test the CLM tools. Below we will go into further details of how to use both methods.
+
+.. todo::
+ Does ``configure`` script still exist?
CIME Testing scripts
====================
-We first introduce the test scripts that work for all CESM components. The CIME script **create_test** runs a specific type of test, at a given resolution, for a given compset using a given machine. See `CIME Chapter on Testing `_ for how to use it to run single tests as well as lists of tests. The standard testname for CLM is "aux_clm" for cheyenne with intel and gnu compilers as well as the CGD machine hobart for intel, nag, and pgi compilers. There's also a shorter test list called "clm_short". Also see the `CTSM Wiki on Testing `_.
+We first introduce the test scripts that work for all CESM components. The CIME script ``create_test`` runs a specific type of test, at a given resolution, for a given compset using a given machine. See `CIME Chapter on Testing `_ for how to use it to run single tests as well as lists of tests. The standard testname for CLM is "aux_clm" for cheyenne with intel and gnu compilers as well as the CGD machine hobart for intel, nag, and pgi compilers. There's also a shorter test list called "clm_short". Also see the `CTSM Wiki on Testing `_.
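+
+For example, to run the full "aux_clm" test list on cheyenne with the intel compiler (a sketch; the available options may differ between CIME versions):
+::
+
+   > cd cime/scripts
+   > ./create_test --xml-category aux_clm --xml-machine cheyenne --xml-compiler intel
+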
CTSM Fortran Unit Tests
=======================
@@ -36,14 +39,3 @@ If something went wrong, you can find the failing tests like so:
::
> grep -E "^[0-9]+/[0-9]+ < [a-zA-Z]+" namelist_test.log | grep -v "PASS"
-
-Testing PTCLM
-=============
-
-.. include:: ../../../../tools/PTCLM/README
- :literal:
-
-To run on cheyenne, you do the following:
-
-.. include:: ../../../../tools/PTCLM/test/README.run_cheyenne
- :literal:
diff --git a/doc/source/users_guide/trouble-shooting/trouble-shooting.rst b/doc/source/users_guide/trouble-shooting/trouble-shooting.rst
index e139796f71..a971b7f2cb 100644
--- a/doc/source/users_guide/trouble-shooting/trouble-shooting.rst
+++ b/doc/source/users_guide/trouble-shooting/trouble-shooting.rst
@@ -71,20 +71,26 @@ So here we know that it is either leaf nitrogen (leafn) or leaf carbon (leafc) t
At this point it is useful as a next step to identify the particular patch index and perhaps the pft type that is triggering the error. In this case, the endrun call is already written to provide this information: the patch index and pft type causing the error, along with some other information, are printed in the lines beginning with ``iam``. The ``iam`` value gives the CTSM processor number (this can be obtained in the code via the ``iam`` variable defined in ``spmdMod``). The local patch index is the value of ``p`` in the current patch loop; "local" implies that it refers to this processor's indexing. However, this same value of ``p`` may appear on other processors, since the local indexing on each processor starts with 1. So, to get the unique patch causing the problem, you either need to use the processor's ``iam`` index (there is only one patch with local index 482 on processor 362), or use the global indices printed below the local index. The "global" term here refers to the global index space across all processors (there is only one patch with a global index of 163723 across all processors). See below for how to use the ``get_global_index`` function to translate from local to global indices.
-If you are writing your own ``endrun`` call, you can get this additional information by specifying the ``subgrid_index`` and ``subgrid_level`` arguments; for example::
+If you are writing your own ``endrun`` call, you can get this additional information by specifying the ``subgrid_index`` and ``subgrid_level`` arguments; for example:
+::
+
call endrun(subgrid_index=p, subgrid_level=subgrid_level_patch, msg=errMsg(sourcefile, __LINE__))
(The ``subgrid_level_patch`` constant, and similar constants for the other subgrid levels, are defined in ``decompMod``, so can be accessed via ``use decompMod, only : subgrid_level_patch``.)
-You can get this same information without aborting the run via a call to ``write_point_context``, which is also defined in the ``abortutils`` module; e.g.::
+You can get this same information without aborting the run via a call to ``write_point_context``, which is also defined in the ``abortutils`` module; e.g.:
- if (abs(carbon_patch(p)) < ccrit) then
- call write_point_context(subgrid_index=p, subgrid_level=subgrid_level_patch)
- end if
+::
+
+ if (abs(carbon_patch(p)) < ccrit) then
+ call write_point_context(subgrid_index=p, subgrid_level=subgrid_level_patch)
+ end if
-Or, if all you want is the global index of ``p`` for the sake of writing extra diagnostic prints like the example below, then you can use the ``get_global_index`` function defined in ``decompMod``, like::
+Or, if all you want is the global index of ``p`` for the sake of writing extra diagnostic prints like the example below, then you can use the ``get_global_index`` function defined in ``decompMod``, like:
+::
+
if (abs(carbon_patch(p)) < ccrit) then
write(iulog,*) 'carbon patch significantly negative at local, global p = ', &
p, get_global_index(subgrid_index=p, subgrid_level=subgrid_level_patch)
@@ -92,8 +98,10 @@ Or, if all you want is the global index of ``p`` for the sake of writing extra d
In all of these cases, the output will appear in either the cesm or lnd log file. In the above example, we see that the local patch index is 482 on processor 362 and the global patch index is 163723. From there, one can use this patch index to write out variables that are used in updating leafc, for example, leafc is updated a number of times in CNCStateUpdate1Mod.F90.
-There are two equivalent methods to write a conditional statement to provide more output for the problem patch within a loop over all patches. The first method is to translate the local index to a global index::
+There are two equivalent methods to write a conditional statement to provide more output for the problem patch within a loop over all patches. The first method is to translate the local index to a global index:
+::
+
use decompMod, only : get_global_index, subgrid_level_patch
...
if (get_global_index(p, subgrid_level_patch) == 163723) then
@@ -101,8 +109,10 @@ There are two equivalent methods to write a conditional statement to provide mor
write(iulog,*)'CNCStateUpdate1Mod +leafc_xfer_to_leafc: ',cf_veg%leafc_xfer_to_leafc_patch(p)*dt
end if
-The second method is to use the local index along with the processor number::
+The second method is to use the local index along with the processor number:
+::
+
use spmdMod, only : iam
...
if (p == 482 .and. iam == 362) then
@@ -112,22 +122,22 @@ The second method is to use the local index along with the processor number::
By placing these write statements in the code, one can get a sense of how leafc is evolving toward a negative state and why. This is a very complex example of troubleshooting. To make a long story short, as described `here `_, the error turned out to be caused by a few lines in the phenology code that weren't handling a 20 minute time step properly, thus an actual bug in the code. This was also a good example of where a much less computationally expensive land-only simulation was able to be used for debugging instead of the orginal expensive fully-coupled simulation.
-Another method of troubleshooting is to use the **point_of_interest** module.
+Another method of troubleshooting is to use the ``point_of_interest`` module.
Use the point_of_interest module
--------------------------------
-It is common, when debugging, to want to print the values of various variables for all patches or columns of certain landunit types within a certain grid cell of interest. For example, one might be able to identify a certain grid cell with an erroneous value for a particular history field variable (e.g., GPP) using for example ncview. Once the latitude and longitude of this grid cell has been determined, the point_of_interest module (**src/utils/point_of_interest.F90**) helps create the logical functions needed to do this. This module is compiled into every CTSM build, but is not invoked by default. To use it
+It is common, when debugging, to want to print the values of various variables for all patches or columns of certain landunit types within a certain grid cell of interest. For example, one might be able to identify a certain grid cell with an erroneous value for a particular history field variable (e.g., GPP) using, for example, ncview. Once the latitude and longitude of this grid cell has been determined, the ``point_of_interest`` module (``src/utils/point_of_interest.F90``) helps create the logical functions needed to do this. This module is compiled into every CTSM build, but is not invoked by default. To use it:
-(1) Enter in the latitude/longitude of the point of interest in the function **at_poi** in **point_of_interest.F90** by setting the variables **poi_lat** and **poi_lon**.
+(1) Enter in the latitude/longitude of the point of interest in the function ``at_poi`` in ``point_of_interest.F90`` by setting the variables ``poi_lat`` and ``poi_lon``.
-(2) You may customize the **point_of_interest.F90** code by changing the example function (**poi_c**) and/or adding new functions. Look for comments about "Customize" to see what to customize.
+(2) You may customize the ``point_of_interest.F90`` code by changing the example function (``poi_c``) and/or adding new functions. Look for comments about "Customize" to see what to customize.
(3) Add calls to these functions in the CTSM code
-The example function in **point_of_interest.F90** is **poi_c**. It finds columns with a given landunit type (in this case, the natural vegetated landunit). That function can be used in a column-level loop to find columns with that landunit within the grid cell of interest. Its typical use in CTSM code is
+The example function in ``point_of_interest.F90`` is ``poi_c``. It finds columns with a given landunit type (in this case, the natural vegetated landunit). That function can be used in a column-level loop to find columns with that landunit within the grid cell of interest. Its typical use in CTSM code is
::
-
+
do fc = 1, num_nolakec
c = filter_nolakec(fc)
! Various code here, maybe setting foo and bar variables
@@ -136,7 +146,7 @@ The example function in **point_of_interest.F90** is **poi_c**. It finds columns
end if
end do
-You will also need a **use** statement in the module from which you are calling poi_c
+You will also need a ``use`` statement in the module from which you are calling ``poi_c``
::
use point_of_interest, only : poi_c
@@ -152,7 +162,7 @@ Here are some other suggestions on how to track down a problem encountered while
Run with a smaller set of processors
------------------------------------
-One way to simplify the system is to run with a smaller set of processors. You will need to clean the setup and edit the --env_mach_pes.xml--. For example, to run with four processors:
+One way to simplify the system is to run with a smaller set of processors. You will need to clean the setup and edit ``env_mach_pes.xml``. For example, to run with four processors:
::
> ./case.setup -clean
@@ -193,5 +203,5 @@ Along the same lines, you might try running a simpler case, trying another comps
Run with a debugger
-------------------
-Another suggestion is to run the model with a debugger such as: **ddt**, **dbx**, **gdb**, or **totalview**. Often to run with a debugger you will need to reduce the number of processors as outlined above. Some debuggers such as **dbx** will only work with one processor, while more advanced debuggers such as **totalview** can work with both MPI tasks and OMP threads. Even simple debuggers though can be used to query core files, to see where the code was at when it died (for example using the **where** in **dbx** for a core file can be very helpful. For help in running with a debugger you will need to contact your system administrators for the machine you are running on.
+Another suggestion is to run the model with a debugger such as ``ddt``, ``dbx``, ``gdb``, or ``totalview``. Often to run with a debugger you will need to reduce the number of processors as outlined above. Some debuggers such as ``dbx`` will only work with one processor, while more advanced debuggers such as ``totalview`` can work with both MPI tasks and OMP threads. Even simple debuggers, though, can be used to query core files to see where the code was when it died (for example, using ``where`` in ``dbx`` on a core file can be very helpful). For help in running with a debugger you will need to contact your system administrators for the machine you are running on.
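+
+For example, a minimal ``gdb`` session on a core file might look like this (a sketch; the executable name and core file location depend on your case and machine):
+::
+
+   > gdb ./cesm.exe core
+   (gdb) where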
diff --git a/doc/source/users_guide/using-clm-tools/building-the-clm-tools.rst b/doc/source/users_guide/using-clm-tools/building-the-clm-tools.rst
index 95e0333d6d..c84d2a136d 100644
--- a/doc/source/users_guide/using-clm-tools/building-the-clm-tools.rst
+++ b/doc/source/users_guide/using-clm-tools/building-the-clm-tools.rst
@@ -4,102 +4,20 @@
.. include:: ../substitutions.rst
-The CLM FORTRAN tools all have similar makefiles, and similar options for building. The tools **cprnc** and **gen_domain** use the CIME configure/build system which is described in the next section.
+.. todo::
+ Update the below, as domain files aren't needed with nuopc.
+The tools ``cprnc`` and ``gen_domain`` use the CIME configure/build system which is described in the next section.
-The Makefiles (for **mksurfdata_esmf** and **mkprocdata_map**) use GNU Make extensions and thus require that you use GNU make to use them. They also auto detect the type of platform you are on, using "uname -s" and set the compiler, compiler flags and such accordingly. There are also environment variables that can be set to set things that must be customized. All the tools use NetCDF and hence require the path to the NetCDF libraries and include files. On some platforms (such as Linux) multiple compilers can be used, and hence there are env variables that can be set to change the FORTRAN and/or "C" compilers used. The tools also allow finer control, by also allowing the user to add compiler flags they choose, for both FORTRAN and "C", as well as picking the compiler, linker and and add linker options. Finally the tools allow you to turn optimization on (which is off by default but on for **mksurfdata_esmf**) with the OPT flag so that the tool will run faster.
-
-Options used by all: **mksurfdata_esmf**
-
-- ``LIB_NETCDF`` -- sets the location of the NetCDF library.
-- ``INC_NETCDF`` -- sets the location of the NetCDF include files.
-- ``USER_FC`` -- sets the name of the FORTRAN compiler.
-
-Options used by: **mkprocdata_map**, and **mksurfdata_esmf**
-
-- ``MOD_NETCDF`` -- sets the location of the NetCDF FORTRAN module.
-- ``USER_LINKER`` -- sets the name of the linker to use.
-- ``USER_CPPDEFS`` -- adds any CPP defines to use.
-- ``USER_CFLAGS`` -- add any "C" compiler flags to use.
-- ``USER_FFLAGS`` -- add any FORTRAN compiler flags to use.
-- ``USER_LDFLAGS`` -- add any linker flags to use.
-- ``USER_CC`` -- sets the name of the "C" compiler to use.
-- ``OPT`` -- set to TRUE to compile the code optimized (TRUE or FALSE)
-- ``SMP`` -- set to TRUE to turn on shared memory parallelism (i.e. OpenMP) (TRUE or FALSE)
-- ``Filepath`` -- list of directories to build source code from.
-- ``Srcfiles`` -- list of source code filenames to build executable from.
-- ``Makefile`` -- customized makefile options for this particular tool.
-- ``mkDepends`` -- figure out dependencies between source files, so make can compile in order..
-- ``Makefile.common`` -- General tool Makefile that should be the same between all tools.
-
-More details on each environment variable.
-
-``LIB_NETCDF``
- This variable sets the path to the NetCDF library file (``libnetcdf.a``). If not set it defaults to ``/usr/local/lib``. In order to use the tools you need to build the NetCDF library and be able to link to it. In order to build the model with a particular compiler you may have to compile the NetCDF library with the same compiler (or at least a compatible one).
-
-``INC_NETCDF``
- This variable sets the path to the NetCDF include directory (in order to find the include file ``netcdf.inc``). if not set it defaults to ``/usr/local/include``.
-
-``MOD_NETCDF``
- This variable sets the path to the NetCDF module directory (in order to find the NetCDF FORTRAN-90 module file when NetCDF is used with a FORTRAN-90 **use statement**. When not set it defaults to the ``LIB_NETCDF`` value.
-
-``USER_FC``
- This variable sets the command name to the FORTRAN-90 compiler to use when compiling the tool. The default compiler to use depends on the platform. And for example, on the AIX platform this variable is NOT used
-
-``USER_LINKER``
- This variable sets the command name to the linker to use when linking the object files from the compiler together to build the executable. By default this is set to the value of the FORTRAN-90 compiler used to compile the source code.
-
-``USER_CPPDEFS``
- This variable adds additional optional values to define for the C preprocessor. Normally, there is no reason to do this as there are very few CPP tokens in the CLM tools. However, if you modify the tools there may be a reason to define new CPP tokens.
-
-``USER_CC``
- This variable sets the command name to the "C" compiler to use when compiling the tool. The default compiler to use depends on the platform. And for example, on the AIX platform this variable is NOT used
-
-``USER_CFLAGS``
- This variable adds additional compiler options for the "C" compiler to use when compiling the tool. By default the compiler options are picked according to the platform and compiler that will be used.
-
-``USER_FFLAGS``
- This variable adds additional compiler options for the FORTRAN-90 compiler to use when compiling the tool. By default the compiler options are picked according to the platform and compiler that will be used.
-
-``USER_LDFLAGS``
- This variable adds additional options to the linker that will be used when linking the object files into the executable. By default the linker options are picked according to the platform and compiler that is used.
-
-``SMP``
- This variable flags if shared memory parallelism (using OpenMP) should be used when compiling the tool. It can be set to either TRUE or FALSE, by default it is set to FALSE, so shared memory parallelism is NOT used. When set to TRUE you can set the number of threads by using the OMP_NUM_THREADS environment variable. Normally, the most you would set this to would be to the number of on-node CPU processors. Turning this on should make the tool run much faster.
-
-.. warning:: Note, that depending on the compiler answers may be different when SMP is activated.
-
-``OPT``
- This variable flags if compiler optimization should be used when compiling the tool. It can be set to either ``TRUE`` or ``FALSE``, by default it is set to for both **mksurfdata_esmf** and **mkprocdata_map**. Turning this on should make the tool run much faster.
-
-.. warning:: Note, you should expect that answers will be different when ``OPT`` is activated.
-
-``Filepath``
- All of the tools are stand-alone and don't need any outside code to operate. The Filepath is the list of directories needed to compile and hence is always simply "." the current directory. Several tools use copies of code outside their directory that is in the CESM distribution (either ``csm_share`` code or CLM source code).
-
-``Srcfiles``
- The ``Srcfiles`` lists the filenames of the source code to use when building the tool.
-
-``Makefile``
- The ``Makefile`` is the custom GNU Makefile for this particular tool. It will customize the ``EXENAME`` and the optimization settings for this particular tool.
-
-``Makefile.common``
- The ``Makefile.common`` is the copy of the general GNU Makefile for all the CLM tools. This file should be identical between the different tools. This file has different sections of compiler options for different Operating Systems and compilers.
-
-``mkDepends``
- The ``mkDepends`` is the copy of the perl script used by the ``Makefile.common`` to figure out the dependencies between the source files so that it can compile in the necessary order. This file should be identical between the different tools.
-
-.. note:: There are several files that are copies of the original files. By having copies the tools can all be made stand-alone, but any changes to the originals will have to be put into the tool directories as well.
-
-The *README.filecopies* (which can be found in ``$CTSMROOT/tools``) is repeated here.
-
-.. include:: ../../../../tools/README.filecopies
- :literal:
+The only CLM FORTRAN tool is ``mksurfdata_esmf``, which has its own build system that takes advantage of the CIME build.
================================================================
Building the CLM tools that use the CIME configure/build system
================================================================
-**cprnc** and *gen_domain** both use the CIME configure/build system rather than the CLM specific version described above.
+.. todo::
+ Update the below, as domain files aren't needed with nuopc.
+
+``cprnc`` and ``gen_domain`` both use the CIME configure/build system rather than the CLM specific version described above.
-See `CIME documentation on adding grids `_ for more information on adding grids, creating mapping files, and running **gen_domain**. Also see the CIME file: ``$CTSMROOT/tools/mapping/gen_domain_files/INSTALL`` for how to build **gen_domain**.
+See `CIME documentation on adding grids `_ for more information on adding grids, creating mapping files, and running ``gen_domain``. Also see the CIME file: ``$CTSMROOT/tools/mapping/gen_domain_files/INSTALL`` for how to build ``gen_domain``.
diff --git a/doc/source/users_guide/using-clm-tools/cprnc.rst b/doc/source/users_guide/using-clm-tools/cprnc.rst
index 05a2ca8279..81418877c9 100644
--- a/doc/source/users_guide/using-clm-tools/cprnc.rst
+++ b/doc/source/users_guide/using-clm-tools/cprnc.rst
@@ -6,7 +6,7 @@
Comparing History Files
=========================
-**cprnc** is a tool shared by |cesmrelease| to compare two NetCDF history files. It differences every field that is shared on both files, and reports a summary of the difference. The summary includes the three largest differences, as well as the root mean square (RMS) difference. It also gives some summary information on the field as well. You have to enter at least one file, and up to two files. With one file it gives you summary information on the file, and with two it gives you information on the differences between the two. At the end it will give you a summary of the fields compared and how many fields were different and how many were identical.
+``cprnc`` is a tool shared by |cesmrelease| to compare two NetCDF history files. It differences every field that is shared on both files, and reports a summary of the differences. The summary includes the three largest differences, as well as the root mean square (RMS) difference. It also gives some summary information on each field. You must enter at least one file, and may enter up to two. With one file it gives you summary information on the file, and with two it gives you information on the differences between the two. At the end it will give you a summary of the fields compared and how many fields were different and how many were identical.
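+
+For example, to summarize one file or compare two (a minimal sketch, assuming ``cprnc`` has been built and is in your path; the file names are hypothetical):
+::
+
+   > cprnc history_file1.nc
+   > cprnc history_file1.nc history_file2.nc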
Options:
diff --git a/doc/source/users_guide/using-clm-tools/creating-domain-files.rst b/doc/source/users_guide/using-clm-tools/creating-domain-files.rst
index d4ebd6c4e7..90814e1927 100644
--- a/doc/source/users_guide/using-clm-tools/creating-domain-files.rst
+++ b/doc/source/users_guide/using-clm-tools/creating-domain-files.rst
@@ -6,7 +6,10 @@
Creating CLM domain files
*****************************
-*gen_domain* to create a domain file for datm from a mapping file. ``gen_domain`` is a tool that is a part of CIME. The domain file is then used by BOTH DATM AND CLM to define the grid and land-mask. The general data flow is shown in two figures. :numref:`Figure mkmapdata.sh` shows the general flow for a general global case (or for a regional grid that DOES include ocean). :numref:`Figure mknoocnmap.pl` shows the use of ``mknoocnmap.pl`` (see :ref:`using-mkocnmap`) to create a regional or single-point map file that is then run through ``gen_domain`` to create the domain file for it. As stated before :numref:`Figure Data_Flow_Legend` is the legend for both of these figures. See `the $CIMEROOT/tools/mapping/gen_domain_files/README `_ file for more help on ``gen_domain``.
+.. todo::
+ Delete this page? Domain files aren't needed with nuopc.
+
+Use ``gen_domain`` to create a domain file for DATM from a mapping file. ``gen_domain`` is a tool that is a part of CIME. The domain file is then used by BOTH DATM AND CLM to define the grid and land-mask. The general data flow is shown in two figures. :numref:`Figure mkmapdata.sh` shows the general flow for a general global case (or for a regional grid that DOES include ocean). :numref:`Figure mknoocnmap.pl` shows the use of ``mknoocnmap.pl`` (see :ref:`using-mkocnmap`) to create a regional or single-point map file that is then run through ``gen_domain`` to create the domain file for it. As stated before :numref:`Figure Data_Flow_Legend` is the legend for both of these figures. See `the $CIMEROOT/tools/mapping/gen_domain_files/README `_ file for more help on ``gen_domain``.
Here we create domain files for a regular global domain.
@@ -19,7 +22,7 @@ Global Domain file creation
Global Domain file creation
-Starting from SCRIP grid files for both your atmosphere and ocean, you use **$CIMEROOT/tools/mapping/gen_mapping_files/gen_cesm_maps.sh** to create a mapping file between the atmosphere and ocean. That mapping file is then used as input to **gen_domain** to create output domain files for both atmosphere and ocean. The atmosphere domain file is then used by both CLM and DATM for I compsets, while the ocean domain file is ignored. For this process you have to define your SCRIP grid files on your own. For a regional or single-point case that doesn't include ocean see :numref:`Figure mknoocnmap.pl`. (See :numref:`Figure Global-Domain` for the legend for this figure.)
+Starting from SCRIP grid files for both your atmosphere and ocean, you use ``$CIMEROOT/tools/mapping/gen_mapping_files/gen_cesm_maps.sh`` to create a mapping file between the atmosphere and ocean. That mapping file is then used as input to ``gen_domain`` to create output domain files for both atmosphere and ocean. The atmosphere domain file is then used by both CLM and DATM for I compsets, while the ocean domain file is ignored. For this process you have to define your SCRIP grid files on your own. For a regional or single-point case that doesn't include ocean see :numref:`Figure mknoocnmap.pl`. (See :numref:`Figure Global-Domain` for the legend for this figure.)
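+
+A sketch of a typical call (the flag names here are assumptions based on the script's usage message; the grid files and names are placeholders):
+::
+
+ $CIMEROOT/tools/mapping/gen_mapping_files/gen_cesm_maps.sh -fatm atm_scrip.nc -natm fv0.9x1.25 -focn ocn_scrip.nc -nocn gx1v7
+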
Note that the SCRIP grid file used to start this process is also used in ``mkmapdata.sh`` (see :ref:`using-mkocnmap`). Next we create domain files for a single-point or regional domain.
@@ -32,6 +35,6 @@ Domain file creation using mknoocnmap.pl
Domain file creation using mknoocnmap.pl
-For a regular latitude/longitude grid that can be used for regional or single point simulations -- you can use **mknoocnmap.pl**. It creates a SCRIP grid file that can then be used as input to **mkmapdata.sh** as well as a SCRIP mapping file that is then input to **gen_domain**. The output of **gen_domain** is a atmosphere domain file used by both CLM and DATM and a ocean domain file that is ignored. (See :numref:`Figure mknoocnmap.pl` for the legend for this figure.)
+For a regular latitude/longitude grid that can be used for regional or single point simulations -- you can use ``mknoocnmap.pl``. It creates a SCRIP grid file that can then be used as input to ``mkmapdata.sh`` as well as a SCRIP mapping file that is then input to ``gen_domain``. The output of ``gen_domain`` is an atmosphere domain file used by both CLM and DATM and an ocean domain file that is ignored. (See :numref:`Figure mknoocnmap.pl` for the legend for this figure.)
-In this case the process creates both SCRIP grid files to be used by **mkmapdata.sh** as well as the domain files that will be used by both CLM and DATM.
+In this case the process creates both SCRIP grid files to be used by ``mkmapdata.sh`` as well as the domain files that will be used by both CLM and DATM.
diff --git a/doc/source/users_guide/using-clm-tools/creating-input-for-surface-dataset-generation.rst b/doc/source/users_guide/using-clm-tools/creating-input-for-surface-dataset-generation.rst
index a727631a6c..66a65adbde 100644
--- a/doc/source/users_guide/using-clm-tools/creating-input-for-surface-dataset-generation.rst
+++ b/doc/source/users_guide/using-clm-tools/creating-input-for-surface-dataset-generation.rst
@@ -18,7 +18,10 @@ SCRIP grid files for all the standard model resolutions and the raw surface data
Using mknocnmap.pl to create grid and maps for single-point regional grids
--------------------------------------------------------------------------
-If you want to create a regular latitude/longitude single-point or regional grid, we suggest you use **mknoocnmap.pl** in ``$CTSMROOT/tools/mkmapdata`` which will create both the SCRIP grid file you need (using ``$CTSMROOT/tools/mkmapgrids/mkscripgrid.ncl`` AND an identity mapping file assuming there is NO ocean in your grid domain. If you HAVE ocean in your domain you could modify the mask in the SCRIP grid file for ocean, and then use **ESMF_RegridWeightGen** to create the mapping file, and **gen_domain** to create the domain file. Like other tools, ``./mkmapdata/mknoocnmap.pl`` has a help option with the following:
+.. todo::
+ Update the below, as domain files aren't needed with nuopc.
+
+If you want to create a regular latitude/longitude single-point or regional grid, we suggest you use ``mknoocnmap.pl`` in ``$CTSMROOT/tools/mkmapdata``, which will create both the SCRIP grid file you need (using ``$CTSMROOT/tools/mkmapgrids/mkscripgrid.ncl``) AND an identity mapping file, assuming there is NO ocean in your grid domain. If you HAVE ocean in your domain, you could modify the mask in the SCRIP grid file for ocean, then use ``ESMF_RegridWeightGen`` to create the mapping file and ``gen_domain`` to create the domain file. Like other tools, ``./mkmapdata/mknoocnmap.pl`` has a help option with the following:
::
SYNOPSIS
diff --git a/doc/source/users_guide/using-clm-tools/creating-surface-datasets.rst b/doc/source/users_guide/using-clm-tools/creating-surface-datasets.rst
index d2e2ef7c89..8394f17b3f 100644
--- a/doc/source/users_guide/using-clm-tools/creating-surface-datasets.rst
+++ b/doc/source/users_guide/using-clm-tools/creating-surface-datasets.rst
@@ -26,29 +26,32 @@ Starting from a SCRIP grid file that describes the grid you will run the model o
Green arrows define the input to a program, while red arrows define the output. Cylinders define files that are either created by a program or used as input for a program. Boxes are programs.
-You start with a description of a SCRIP grid file for your output grid file and then create mapping files from the raw datasets to it. Once, the mapping files are created **mksurfdata_esmf** is run to create the surface dataset to run the model.
+You start with a description of a SCRIP grid file for your output grid file and then create mapping files from the raw datasets to it. Once the mapping files are created, ``mksurfdata_esmf`` is run to create the surface dataset to run the model.
Creating a Complete Set of Files for Input to CLM
-------------------------------------------------
1. Create SCRIP grid datasets (if NOT already done)
- First you need to create a descriptor file for your grid, that includes the locations of cell centers and cell corners. There is also a "mask" field, but in this case the mask is set to one everywhere (i.e. all of the masks for the output model grid are "nomask"). An example SCRIP grid file is: $CSMDATA/lnd/clm2/mappingdata/grids/SCRIPgrid_10x15_nomask_c110308.nc. The mkmapgrids and mkscripgrid.ncl NCL script in the $CTSMROOT/tools/mkmapgrids directory can help you with this. SCRIP grid files for all the standard CLM grids are already created for you. See the Section called Creating an output SCRIP grid file at a resolution to run the model on for more information on this.
+ First you need to create a descriptor file for your grid that includes the locations of cell centers and cell corners. There is also a "mask" field, but in this case the mask is set to one everywhere (i.e. all of the masks for the output model grid are "nomask"). An example SCRIP grid file is: ``$CSMDATA/lnd/clm2/mappingdata/grids/SCRIPgrid_10x15_nomask_c110308.nc``. The ``mkmapgrids`` tool and the ``mkscripgrid.ncl`` NCL script in the ``$CTSMROOT/tools/mkmapgrids`` directory can help you with this. SCRIP grid files for all the standard CLM grids are already created for you. See the Section called Creating an output SCRIP grid file at a resolution to run the model on for more information on this.
+
+.. todo::
+ Update the below, as domain files aren't needed with nuopc.
2. Create domain dataset (if NOT already done)
- Next use gen_domain to create a domain file for use by DATM and CLM. This is required, unless a domain file was already created. See the Section called Creating a domain file for CLM and DATM for more information on this.
+ Next use ``gen_domain`` to create a domain file for use by DATM and CLM. This is required, unless a domain file was already created. See the Section called Creating a domain file for CLM and DATM for more information on this.
-3. Create mapping files for mksurfdata_esmf (if NOT already done)
+3. Create mapping files for ``mksurfdata_esmf`` (if NOT already done)
- Create mapping files for mksurfdata_esmf with mkmapdata.sh in $CTSMROOT/tools/mkmapdata. See the Section called Creating mapping files that mksurfdata_esmf will use for more information on this.
+ Create mapping files for ``mksurfdata_esmf`` with ``mkmapdata.sh`` in ``$CTSMROOT/tools/mkmapdata``. See the Section called Creating mapping files that ``mksurfdata_esmf`` will use for more information on this.
4. Create surface datasets
- Next use mksurfdata_esmf to create a surface dataset, using the mapping datasets created on the previous step as input. There is a version for either clm4_0 or |version| for this program. See the Section called Using mksurfdata_esmf to create surface datasets from grid datasets for more information on this.
+ Next use ``mksurfdata_esmf`` to create a surface dataset, using the mapping datasets created in the previous step as input. There are versions of this program for both clm4_0 and |version|. See the Section called Using ``mksurfdata_esmf`` to create surface datasets from grid datasets for more information on this.
-5. Enter the new datasets into the build-namelist XML database
- The last optional thing to do is to enter the new datasets into the build-namelist XML database. See Chapter 3 for more information on doing this. This is optional because the user may enter these files into their namelists manually. The advantage of entering them into the database is so that they automatically come up when you create new cases.
+5. Enter the new datasets into the ``build-namelist`` XML database
+ The last optional thing to do is to enter the new datasets into the ``build-namelist`` XML database. See Chapter 3 for more information on doing this. This is optional because the user may enter these files into their namelists manually. The advantage of entering them into the database is that they automatically come up when you create new cases. A condensed sketch of the preceding steps is shown below.
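+
+A condensed shell sketch of steps 1-4 above (all paths, file names, and options are illustrative placeholders; step 2 is unnecessary with nuopc, per the todo above):
+::
+
+ # 1. Create a SCRIP grid file for the target grid
+ ncl $CTSMROOT/tools/mkmapgrids/mkscripgrid.ncl
+ # 2. Create a domain file from a mapping file
+ $CIMEROOT/tools/mapping/gen_domain_files/gen_domain -m map.nc -o ocn_grid -l lnd_grid
+ # 3. Create the mapping files that mksurfdata_esmf will use
+ $CTSMROOT/tools/mkmapdata/mkmapdata.sh -r your_grid
+ # 4. Create the surface dataset from a namelist
+ ./mksurfdata_esmf < mksurfdata_esmf.namelist
+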
The ``$CTSMROOT/tools/README`` goes through the complete process for creating input files needed to run CLM. We repeat that file here:
diff --git a/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst b/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst
index 50a7969281..82169e8238 100644
--- a/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst
+++ b/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst
@@ -6,6 +6,9 @@
Observational Sites Datasets
*******************************
-There are two ways to customize datasets for a particular observational site. The first is to customize the input to the tools that create the dataset, and the second is to overwrite the default data after you've created a given dataset. Depending on the tool it might be easier to do it one way or the other. In Table :numref:`reqd-files-table` we list the files that are most likely to be customized and the way they might be customized. Of those files, the ones you are most likely to customize are: ``fatmlndfrc``, ``fsurdat``, ``faerdep`` (for DATM), and ``stream_fldfilename_ndep``. Note ``mksurfdata_esmf`` as documented previously has options to overwrite the vegetation and soil types. For more information on this also see :ref:`creating-your-own-singlepoint-dataset`. ``PTCLM`` uses these methods to customize datasets; see Chapter :numref:`running-PTCLM`.
+.. todo::
+ Update this.
-Another aspect of customizing your input datasets is customizing the input atmospheric forcing datasets; see :ref:`creating-your-own-singlepoint-dataset` for more information on this. :ref:`converting-ameriflux-for-ptclmmkdata` has information on using the AmeriFlux tower site data as atmospheric forcing.
+There are two ways to customize datasets for a particular observational site. The first is to customize the input to the tools that create the dataset, and the second is to overwrite the default data after you've created a given dataset. Depending on the tool it might be easier to do it one way or the other. In Table :numref:`reqd-files-table` we list the files that are most likely to be customized and the way they might be customized. Of those files, the ones you are most likely to customize are: ``fatmlndfrc``, ``fsurdat``, ``faerdep`` (for DATM), and ``stream_fldfilename_ndep``. Note that ``mksurfdata_esmf``, as documented previously, has options to overwrite the vegetation and soil types. For more information on this, also see :ref:`creating-your-own-singlepoint-dataset`.
+
+Another aspect of customizing your input datasets is customizing the input atmospheric forcing datasets; see :ref:`creating-your-own-singlepoint-dataset` for more information on this.
diff --git a/doc/source/users_guide/using-clm-tools/what-are-the-clm-tools.rst b/doc/source/users_guide/using-clm-tools/what-are-the-clm-tools.rst
index 664e23a220..df7988451d 100644
--- a/doc/source/users_guide/using-clm-tools/what-are-the-clm-tools.rst
+++ b/doc/source/users_guide/using-clm-tools/what-are-the-clm-tools.rst
@@ -6,27 +6,32 @@
What are the CLM tools
========================
+.. todo::
+ Remove references to mkprocdata_map?
+
There are several tools provided with CLM that allow you to create your own input datasets at resolutions you choose, to interpolate initial conditions to a different resolution, or to compare CLM history files between different cases. The tools are all available in the ``$CTSMROOT/tools`` directory. Most of the tools are FORTRAN stand-alone programs in their own directory, but there is also a suite of NCL scripts in the ``$CTSMROOT/tools/ncl_scripts`` directory, and some of the tools are scripts that may also call the ESMF regridding program. Some of the NCL scripts are very specialized and not meant for general use, and we won't document them here. They are still documented in the scripts themselves and in the README file in the tools directory.
The tools produce files that can be used for CLM4.5 and |version|. They do **NOT** produce files that can be used for CLM4.0. If you need files for CLM4.0, you'll need to use a previous version of CLM.
The list of generally important scripts and programs are as follows.
-1. *./mkmapgrids* to create SCRIP grid data files from old CLM format grid files that can then be used to create new CLM datasets (deprecated). There is also a NCL script (``./mkmapgrids/mkscripgrid.ncl`` to create SCRIP grid files for regular latitude/longitude grids.
-
-#. *./mkmapdata* to create SCRIP mapping data file from SCRIP grid files (uses ESMF).
+1. ``./mkmapgrids`` to create SCRIP grid data files from old CLM format grid files that can then be used to create new CLM datasets (deprecated). There is also an NCL script (``./mkmapgrids/mkscripgrid.ncl``) to create SCRIP grid files for regular latitude/longitude grids.
-#. *mksurfdata_esmf* to create surface datasets from grid datasets (clm4_0 and |version| versions).
+#. ``./mkmapdata`` to create a SCRIP mapping data file from SCRIP grid files (uses ESMF).
-#. *./mkprocdata_map* to interpolate output unstructured grids (such as the CAM HOMME dy-core "ne" grids like ne30np4) into a 2D regular lat/long grid format that can be plotted easily. Can be used by either clm4_0 or |version|.
+#. ``mksurfdata_esmf`` to create surface datasets from grid datasets (clm4_0 and |version| versions).
+.. todo::
+ Update the below, as domain files aren't needed with nuopc.
-#. *$CIMEROOT/tools/mapping/gen_domain_files/gen_domain* to create a domain file for datm from a mapping file. The domain file is then used by BOTH datm AND CLM to define the grid and land-mask.
-#. *$CIMEROOT/tools/cprnc* to compare two NetCDF files.
+#. ``$CIMEROOT/tools/mapping/gen_domain_files/gen_domain`` to create a domain file for datm from a mapping file. The domain file is then used by BOTH datm AND CLM to define the grid and land-mask.
+
+#. ``$CIMEROOT/tools/cprnc`` to compare two NetCDF files.
In the sections to come we will go into a detailed description of how to use each of these tools in turn. First, however, we will discuss the common environment variables and options that are used by all of the FORTRAN tools. Second, we outline the entire file creation process for all input files needed by CLM for a new resolution; then we turn to each tool. In the last section we will discuss how to customize files for particular observational sites.
-The FORTRAN tools (mksurfdata_esmf and mkprocdata_map) run, with a namelist (mksurfdata_esmf) to provide options, or with command line arguments (mkprocdata_map).
+The FORTRAN tool (``mksurfdata_esmf``) runs with a namelist, and there is a namelist builder for it.
In the following sections, we will outline how to make these files available for build-namelist so that you can easily create simulations that include them. In the chapter on single-point and regional datasets we also give an alternative way to enter new datasets without having to edit files.
@@ -34,34 +39,21 @@ In the following sections, we will outline how to make these files available for
Running FORTRAN tools with namelists
------------------------------------
-**mksurfdata_esmf** runs with a namelist that is read from standard input. Hence, you create a namelist and then run them by redirecting the namelist file into standard input as follows:
+``mksurfdata_esmf`` runs with a namelist that is read from standard input. Hence, you create a namelist and then run it by redirecting the namelist file into standard input as follows:
::
./program < namelist
-There is a sample namelist called ``$CTSMROOT/tools/mksurfdata_esmf/mksurfdata_esmf.namleist`` that shows you what the namelist should look like. **mksurfdata_esmf** also has a script that creates the namelist and runs the program for you. Namelists that you create should be similar to the example namelist. The namelist values are also documented along with the other namelists in the:
+``mksurfdata_esmf`` also has a script that creates the namelist and runs the program for you. The namelist values are documented, along with the other namelists, in the:
::
- $CTSMROOT/bld/namelist_files/namelist_definition.xml`` file
- and default values in the:
- $CTSMROOT/bld/namelist_files/namelist_defaults_clm_tools.xml`` file.
+ $CTSMROOT/tools/mksurfdata_esmf/gen_mksurfdata_namelist.xml file
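+
+For example, the namelist builder can be invoked along these lines (these flags appear in the repository's tests; the wrapper path is an assumption):
+::
+
+ tools/mksurfdata_esmf/gen_mksurfdata_namelist --start-year 1850 --end-year 1850 --res 1.9x2.5 --nocrop --inlandwet --glc --glc-nec 10
+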
-----------------------------------------------
Running FORTRAN tools with command line options
-----------------------------------------------
-**gen_domain**, mkprocdata_map, and **cprnc** run with command line arguments. The detailed sections below will give you more information on the command line arguments specific to each tool. Also running the tool without any arguments will give you a general synopsis on how to run the tool.
-
------------------------------------------
-Running FORTRAN tools built with SMP=TRUE
------------------------------------------
-
-When you enable ``SMP=TRUE`` on your build of one of the tools that make use of it, you are using OpenMP for shared memory parallelism (SMP). In SMP loops are run in parallel with different threads run on different processors all of which access the same memory (called on-node). Thus you can only usefully run up to the number of processors that are available on a single-node of the machine you are running on. For example, on the NCAR machine cheyenne there are 36 processors per node, so you can use up to 36 processors.
-
-.. _using-ncl:
-
----------
-Using NCL
----------
+.. todo::
+ Update the below, as domain files aren't needed with nuopc.
-In the tools directory ``$CTSMROOT/tools/ncl_scripts`` and in a few other locations there are scripts that use NCAR Command Language (NCL). Unlike the FORTRAN tools, you will need to get a copy of NCL in order to use them. You also won't have to build an executable in order to use them, hence no Makefile is provided. NCL is provided for free download as either binaries or source code from: `http://www.ncl.ucar.edu/ `_. The NCL web-site also contains documentation on NCL and it's use. These scripts are stand-alone and at most use environment variables to control how to use them. In some cases there are perl scripts with command line arguments that call the NCL scripts to control what they do.
+``gen_domain`` and ``cprnc`` run with command line arguments. The detailed sections below give more information on the command line arguments specific to each tool. Running a tool without any arguments prints a general synopsis of how to run it.
diff --git a/doc/source/users_guide/using-mesh-maker/how-to-make-mesh.rst b/doc/source/users_guide/using-mesh-maker/how-to-make-mesh.rst
new file mode 100644
index 0000000000..f87394fd89
--- /dev/null
+++ b/doc/source/users_guide/using-mesh-maker/how-to-make-mesh.rst
@@ -0,0 +1,67 @@
+.. include:: ../substitutions.rst
+
+.. _how-to-make-mesh:
+
+===============================================
+ Creating an ESMF mesh file from a netCDF file
+===============================================
+
+This page includes instructions for using the ``mesh_maker`` tool to create a mesh file from a netCDF file with valid 1D or 2D latitude and longitude coordinates. It also shows how to use ``mesh_plotter`` to visualize a mesh file.
+
+.. note:: An **ESMF mesh file** is a netCDF file that includes the information about the grid's coordinates and their connectivity to each other in an **Unstructured Grid Format**. Additional information about ESMF mesh files is available `here `_.
+
+You can check out the ``mesh_maker`` options like so:
+
+::
+
+ > tools/site_and_regional/mesh_maker --help
+
+ |------------------------------------------------------------------|
+ |--------------------- Instructions -----------------------------|
+ |------------------------------------------------------------------|
+ This script creates ESMF unstructured GRID (mesh file) from a netCDF
+ file with valid lats and lons. Provided lats and lons can be 1D or 2D.
+
+ For example for running WRF-CTSM cases, the user can create a mesh
+ file for their domain :
+ ./mesh_maker.py --input wrfinput_d01 --output my_region
+ --lat XLAT --lon XLONG --verbose
+
+ optional arguments:
+ -h, --help show this help message and exit
+ --input INPUT Netcdf input file for creating ESMF mesh.
+ --output OUTPUT Name of the ESMF mesh created.
+ --outdir OUT_DIR Output directory (only if name of output mesh is not
+ defined)
+ --lat LAT_NAME Name of latitude variable on netCDF input file. If none
+ given, looks to find variables that include 'lat'.
+ --lon LON_NAME Name of longitude variable on netCDF input file. If none
+ given, looks to find variables that include 'lon'.
+ --mask MASK_NAME Name of mask variable on netCDF input file. If none given,
+ create a fake mask with values of 1.
+ --area AREA_NAME Name of area variable on netCDF input file. If none given,
+ ESMF calculates element areas automatically.
+ --overwrite If meshfile exists, overwrite the meshfile.
+ -v, --verbose Increase output verbosity
+
+Example: Making and visualizing a mesh file
+-------------------------------------------
+
+In this example, we will use ``mesh_maker`` to create a mesh file from a netCDF file with 2D latitudes and longitudes. In the sample input provided, those coordinates are saved in the ``LATIXY`` and ``LONGXY`` variables, respectively.
+
+::
+
+ input_file="python/ctsm/test/testinputs/surfdata_5x5_amazon_hist_78pfts_CMIP6_2000_c230517.nc"
+ output_file="meshfile_5x5_amazon.nc"
+
+ # Create the file. (Add --verbose for additional debugging information.)
+ tools/site_and_regional/mesh_maker --input "${input_file}" --output "${output_file}" --lon LONGXY --lat LATIXY
+
+ # Visualize the meshes
+ tools/site_and_regional/mesh_plotter --input "${output_file}"
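+
+ # Optionally set the output resolution and omit the red center-coordinate
+ # markers (both flags are described in mesh_plotter --help)
+ tools/site_and_regional/mesh_plotter --input "${output_file}" --dpi 300 --no-center-coords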
+
+This produces two figures:
+
+.. figure:: test_c240918_regional.png
+
+.. figure:: test_c240918_global.png
diff --git a/doc/source/users_guide/running-PTCLM/index.rst b/doc/source/users_guide/using-mesh-maker/index.rst
similarity index 72%
rename from doc/source/users_guide/running-PTCLM/index.rst
rename to doc/source/users_guide/using-mesh-maker/index.rst
index 0b44c01b49..ecc20bab9c 100644
--- a/doc/source/users_guide/running-PTCLM/index.rst
+++ b/doc/source/users_guide/using-mesh-maker/index.rst
@@ -5,16 +5,13 @@
.. include:: ../substitutions.rst
-.. _running-PTCLM:
+.. _using-mesh-maker-index:
#####################################
-Running PTCLM
+Using mesh_maker
#####################################
.. toctree::
:maxdepth: 2
- introduction-to-ptclm.rst
- using-ptclm.rst
- ptclm-examples.rst
- adding-ptclm-site-data.rst
+ how-to-make-mesh.rst
diff --git a/doc/source/users_guide/using-mesh-maker/test_c240918_global.png b/doc/source/users_guide/using-mesh-maker/test_c240918_global.png
new file mode 100644
index 0000000000..b951c570ab
--- /dev/null
+++ b/doc/source/users_guide/using-mesh-maker/test_c240918_global.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48f47d585b38798710f59edf4cee8fd6b5cb77c81ec1160749fec706c0b106b4
+size 1031528
diff --git a/doc/source/users_guide/using-mesh-maker/test_c240918_regional.png b/doc/source/users_guide/using-mesh-maker/test_c240918_regional.png
new file mode 100644
index 0000000000..f1b761ba10
--- /dev/null
+++ b/doc/source/users_guide/using-mesh-maker/test_c240918_regional.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1dfaa42056d68629c2c6ba1e847ae5e462fccf7d2175d6f6b4d65d5b3aad7491
+size 642577
diff --git a/python/ctsm/mesh_plotter.py b/python/ctsm/mesh_plotter.py
index 7bcbae6fb4..0a2af11856 100644
--- a/python/ctsm/mesh_plotter.py
+++ b/python/ctsm/mesh_plotter.py
@@ -62,12 +62,26 @@ def get_parser():
parser.add_argument(
"--overwrite",
- help="If plots xists, overwrite them.",
+ help="If plots exist, overwrite them.",
action="store_true",
dest="overwrite",
required=False,
)
+ parser.add_argument(
+ "--no-center-coords",
+ help="Do not include red Xs at center of grid cells.",
+ action="store_true",
+ required=False,
+ )
+
+ default_dpi = 300
+ parser.add_argument(
+ "--dpi",
+ help=f"Dots per square inch in output; default {default_dpi}",
+ type=float,
+ )
+
add_logging_args(parser)
return parser
@@ -98,9 +112,10 @@ def process_and_check_args(args):
today = datetime.today()
today_string = today.strftime("%y%m%d")
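+ # Use just the file's base name so the dated output name is joined under args.out_dir below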
+ input_filename = os.path.basename(args.input)
args.output = os.path.join(
args.out_dir,
- os.path.splitext(args.input)[0] + "_c" + today_string,
+ os.path.splitext(input_filename)[0] + "_c" + today_string,
)
if not os.path.isfile(args.input):
@@ -148,10 +163,15 @@ def main():
this_mesh.read_file(ds)
plot_regional = os.path.splitext(mesh_out)[0] + "_regional" + ".png"
+ file_exists_msg = "File already exists but --overwrite not given: "
+ if os.path.exists(plot_regional) and not args.overwrite:
+ raise FileExistsError(file_exists_msg + plot_regional)
plot_global = os.path.splitext(mesh_out)[0] + "_global" + ".png"
+ if os.path.exists(plot_global) and not args.overwrite:
+ raise FileExistsError(file_exists_msg + plot_global)
- this_mesh.make_mesh_plot(plot_regional, plot_global)
+ this_mesh.make_mesh_plot(plot_regional, plot_global, args)
if __name__ == "__main__":
diff --git a/python/ctsm/site_and_regional/mesh_plot_type.py b/python/ctsm/site_and_regional/mesh_plot_type.py
index 872c0e8101..08fd1c061c 100644
--- a/python/ctsm/site_and_regional/mesh_plot_type.py
+++ b/python/ctsm/site_and_regional/mesh_plot_type.py
@@ -24,7 +24,7 @@ class MeshPlotType(MeshType):
Extend mesh type with some advanced plotting capability
"""
- def make_mesh_plot(self, plot_regional, plot_global):
+ def make_mesh_plot(self, plot_regional, plot_global, args):
"""
Create plots for the ESMF mesh file
@@ -36,10 +36,10 @@ def make_mesh_plot(self, plot_regional, plot_global):
The path to write the ESMF meshfile global plot
"""
- self.mesh_plot(plot_regional, regional=True)
- self.mesh_plot(plot_global, regional=False)
+ self.mesh_plot(plot_regional, args, regional=True)
+ self.mesh_plot(plot_global, args, regional=False)
- def mesh_plot(self, plot_file, regional):
+ def mesh_plot(self, plot_file, args, regional):
"""Make a plot of a mesh file in either a regional or global grid"""
# -- regional settings
if regional:
@@ -49,7 +49,7 @@ def mesh_plot(self, plot_file, regional):
plot_type = "regional"
line_width = 1
marker = "x"
- marker_size = 1
+ marker_size = 50
# global settings
else:
fig = plt.figure(num=None, figsize=(15, 10), facecolor="w", edgecolor="k")
@@ -58,7 +58,9 @@ def mesh_plot(self, plot_file, regional):
plot_type = "global"
line_width = 0.5
marker = "o"
- marker_size = None
+ marker_size = 0.1
+ if args.no_center_coords:
+ marker_size = 0
ax.add_feature(cfeature.COASTLINE, edgecolor="black")
ax.add_feature(cfeature.BORDERS, edgecolor="black")
@@ -129,8 +131,9 @@ def mesh_plot(self, plot_file, regional):
*[(k, mpatches.Rectangle((0, 0), 1, 1, facecolor=v)) for k, v in lc_colors.items()]
)
- ax.legend(handles, labels)
+ if not args.no_center_coords:
+ ax.legend(handles, labels)
- plt.savefig(plot_file, bbox_inches="tight")
+ plt.savefig(plot_file, bbox_inches="tight", dpi=args.dpi)
logger.info("Successfully created %s plots for ESMF Mesh file : %s", plot_type, plot_file)
diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py
index bc9ce97962..d38aee1308 100644
--- a/python/ctsm/subset_data.py
+++ b/python/ctsm/subset_data.py
@@ -597,10 +597,6 @@ def setup_files(args, defaults, cesmroot):
clmforcingindir,
os.path.join(defaults.get("surfdat", "dir")),
),
- "mesh_dir": os.path.join(
- clmforcingindir,
- os.path.join(defaults.get("surfdat", "mesh_dir")),
- ),
"fluse_dir": os.path.join(
clmforcingindir,
os.path.join(defaults.get("landuse", "dir")),
@@ -608,7 +604,6 @@ def setup_files(args, defaults, cesmroot):
"fsurf_in": fsurf_in,
"fsurf_out": fsurf_out,
"fluse_in": fluse_in,
- "mesh_surf": defaults.get("surfdat", "mesh_surf"),
"datm_tuple": DatmFiles(
dir_input_datm,
dir_output_datm,
@@ -736,9 +731,6 @@ def subset_region(args, file_dict: dict):
specify_fsurf_out=file_dict["fsurf_out"],
)
- # if region.create_mesh:
- # region.create_mesh_at_reg (file_dict["mesh_dir"], file_dict["mesh_surf"])
-
# -- Create CTSM transient landuse data file
if region.create_landuse:
region.create_landuse_at_reg(
diff --git a/python/ctsm/test/test_advanced_sys_mesh_plotter.py b/python/ctsm/test/test_advanced_sys_mesh_plotter.py
index 090f7806bf..4a7c63ecf6 100755
--- a/python/ctsm/test/test_advanced_sys_mesh_plotter.py
+++ b/python/ctsm/test/test_advanced_sys_mesh_plotter.py
@@ -26,14 +26,27 @@ class SysTestMeshMaker(unittest.TestCase):
def setUp(self):
"""Setup for all tests"""
- testinputs_path = os.path.join(path_to_ctsm_root(), "python/ctsm/test/testinputs")
+ testinputs_path = os.path.join(
+ path_to_ctsm_root(),
+ "python",
+ "ctsm",
+ "test",
+ "testinputs",
+ )
self._testinputs_path = testinputs_path
self._infile = os.path.join(
testinputs_path,
"ESMF_mesh_5x5pt_amazon_from_domain_c230308.nc",
)
self._tempdir = tempfile.mkdtemp()
- self.mesh_out = self._tempdir + "/mesh_out"
+ self.mesh_out = os.path.join(self._tempdir, "mesh_out")
+ self.test_basic_argv = [
+ "mesh_plotter",
+ "--input",
+ self._infile,
+ "--output",
+ self.mesh_out,
+ ]
def tearDown(self):
"""
@@ -43,15 +56,44 @@ def tearDown(self):
def test_basic(self):
"""Do a simple basic test"""
+ sys.argv = self.test_basic_argv
+ main()
+ plotfiles = glob.glob(os.path.join(self._tempdir, "*.png"))
+ if not plotfiles:
+ self.fail("plot files were NOT created as they should have been")
+
+ def test_dpi(self):
+ """Test setting dpi"""
+ sys.argv = self.test_basic_argv + [
+ "--dpi",
+ "198.7",
+ ]
+ main()
+ plotfiles = glob.glob(os.path.join(self._tempdir, "*.png"))
+ if not plotfiles:
+ self.fail("plot files were NOT created as they should have been")
+
+ def test_need_overwrite(self):
+ """Ensure failure if output file exists but --overwrite not given"""
+ sys.argv = self.test_basic_argv
+ main()
+ with self.assertRaisesRegex(
+ FileExistsError, "File already exists but --overwrite not given"
+ ):
+ main()
+
+ def test_outdir(self):
+ """Test that --outdir option works"""
+ outdir = os.path.join(self._tempdir, "abc123")
sys.argv = [
"mesh_plotter",
"--input",
self._infile,
- "--output",
- self.mesh_out,
+ "--outdir",
+ outdir,
]
main()
- plotfiles = glob.glob(self._tempdir + "/*.png")
+ plotfiles = glob.glob(os.path.join(outdir, "*.png"))
if not plotfiles:
self.fail("plot files were NOT created as they should have been")
diff --git a/python/ctsm/test/test_sys_gen_mksurfdata_namelist.py b/python/ctsm/test/test_sys_gen_mksurfdata_namelist.py
index 29745e9d80..c9741daf5c 100755
--- a/python/ctsm/test/test_sys_gen_mksurfdata_namelist.py
+++ b/python/ctsm/test/test_sys_gen_mksurfdata_namelist.py
@@ -62,7 +62,7 @@ def test_simple_namelist(self):
main()
self.assertTrue(os.path.exists(self.outfile), "Output surface dataset file should exist")
- def test_vic_nocrop_inlandwet_glc_namelist(self):
+ def test_nocrop_inlandwet_glc_namelist(self):
"""
Test a namelist with several options on
"""
@@ -75,7 +75,6 @@ def test_vic_nocrop_inlandwet_glc_namelist(self):
"1850",
"--res",
"1.9x2.5",
- "--vic",
"--nocrop",
"--inlandwet",
"--glc",
@@ -99,7 +98,6 @@ def test_hires_namelist(self):
"mpasa15",
"--glc-nec",
"10",
- "--hires_pft",
"--hires_soitex",
]
)
diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py
index 143b632e8c..a918fb35f0 100755
--- a/python/ctsm/test/test_unit_subset_data.py
+++ b/python/ctsm/test/test_unit_subset_data.py
@@ -48,7 +48,7 @@ def test_inputdata_setup_files_basic(self):
files = setup_files(self.args, self.defaults, self.cesmroot)
self.assertEqual(
files["fsurf_in"],
- "surfdata_0.9x1.25_hist_2000_16pfts_c240216.nc",
+ "surfdata_0.9x1.25_hist_2000_16pfts_c240908.nc",
"fsurf_in filename not whats expected",
)
self.assertEqual(
diff --git a/python/ctsm/test/testinputs/default_data.cfg b/python/ctsm/test/testinputs/default_data.cfg
index d736f2e933..24aa811fee 100644
--- a/python/ctsm/test/testinputs/default_data.cfg
+++ b/python/ctsm/test/testinputs/default_data.cfg
@@ -15,16 +15,16 @@ precname = CLMGSWP3v1.Precip
tpqwname = CLMGSWP3v1.TPQW
[surfdat]
-dir = lnd/clm2/surfdata_esmf/ctsm5.2.0
-surfdat_16pft = surfdata_0.9x1.25_hist_2000_16pfts_c240216.nc
-surfdat_78pft = surfdata_0.9x1.25_hist_2000_78pfts_c240216.nc
+dir = lnd/clm2/surfdata_esmf/ctsm5.3.0
+surfdat_16pft = surfdata_0.9x1.25_hist_2000_16pfts_c240908.nc
+surfdat_78pft = surfdata_0.9x1.25_hist_2000_78pfts_c240908.nc
mesh_dir = share/meshes/
mesh_surf = fv0.9x1.25_141008_ESMFmesh.nc
[landuse]
-dir = lnd/clm2/surfdata_esmf/ctsm5.2.0
-landuse_16pft = landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240216.nc
-landuse_78pft = landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240216.nc
+dir = lnd/clm2/surfdata_esmf/ctsm5.3.0
+landuse_16pft = landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240908.nc
+landuse_78pft = landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240908.nc
[domain]
file = share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc
diff --git a/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py b/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py
index 6c38efdd0d..5de67adb12 100755
--- a/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py
+++ b/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py
@@ -42,6 +42,7 @@
"crop-global-hist-low-res",
"crop-global-hist-ne16",
"crop-global-hist-ne30",
+ "crop-global-hist-f09",
"crop-global-SSP1-1.9-f09",
"crop-global-SSP1-2.6-f09",
"crop-global-SSP2-4.5-f09",
@@ -198,6 +199,7 @@ def main():
"low_res_no_crop": ["4x5", "10x15"],
"ultra_hi_res_no_crop": ["mpasa15", "mpasa3p75"],
"standard_res": ["360x720cru", "0.9x1.25", "1.9x2.5", "C96", "mpasa120"],
+ "standard_res_no_f09": ["360x720cru", "1.9x2.5", "C96", "mpasa120"],
"low_res": ["4x5", "10x15", "ne3np4.pg3"],
"mpasa480": ["mpasa480"],
"nldas_res": ["0.125nldas2"],
@@ -209,11 +211,13 @@ def main():
"ne0np4.ARCTICGRIS.ne30x8",
"ne0np4.ARCTIC.ne30x4",
"ne0np4CONUS.ne30x8",
+ "ne0np4.POLARCAP.ne30x4",
],
"ne120": [
"ne0np4.ARCTICGRIS.ne30x8",
"ne0np4.ARCTIC.ne30x4",
"ne0np4CONUS.ne30x8",
+ "ne0np4.POLARCAP.ne30x4",
"ne120np4.pg3",
],
}
@@ -243,11 +247,11 @@ def main():
"5x5_amazon",
),
"crop-global-present": (
- "--start-year 2000 --end-year 2000 --vic --res",
+ "--start-year 2000 --end-year 2000 --res",
"standard_res",
),
"crop-global-present-low-res": (
- "--start-year 2000 --end-year 2000 --vic --res",
+ "--start-year 2000 --end-year 2000 --res",
"low_res",
),
"crop-global-present-ne16": (
@@ -267,7 +271,6 @@ def main():
"mpasa480",
),
"crop-global-present-nldas": (
- # TODO slevis: --hirespft uses old data for now, so keep out
"--start-year 2000 --end-year 2000 --res",
"nldas_res",
),
@@ -296,21 +299,25 @@ def main():
"mpasa480",
),
"crop-global-hist": (
- "--start-year 1850 --end-year 2015 --nosurfdata --res",
- "standard_res",
+ "--start-year 1850 --end-year 2023 --nosurfdata --res",
+ "standard_res_no_f09",
),
"crop-global-hist-low-res": (
- "--start-year 1850 --end-year 2015 --nosurfdata --res",
+ "--start-year 1850 --end-year 2023 --nosurfdata --res",
"low_res",
),
"crop-global-hist-ne16": (
- "--start-year 1850 --end-year 2015 --nosurfdata --res",
+ "--start-year 1850 --end-year 2023 --nosurfdata --res",
"ne16",
),
"crop-global-hist-ne30": (
- "--start-year 1850 --end-year 2015 --nosurfdata --res",
+ "--start-year 1850 --end-year 2023 --nosurfdata --res",
"ne30",
),
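+        # Note: f09 is excluded from "crop-global-hist" (via standard_res_no_f09) because
+        # the dedicated "crop-global-hist-f09" entry below covers it with the longer
+        # 1700-2023 transient period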
+ "crop-global-hist-f09": (
+ "--start-year 1700 --end-year 2023 --res",
+ "f09",
+ ),
"crop-global-SSP1-1.9-f09": (
"--start-year 1850 --end-year 2100 --nosurfdata --ssp-rcp SSP1-1.9 --res",
"f09",
diff --git a/python/ctsm/toolchain/gen_mksurfdata_namelist.py b/python/ctsm/toolchain/gen_mksurfdata_namelist.py
index 361616419f..da9420871b 100755
--- a/python/ctsm/toolchain/gen_mksurfdata_namelist.py
+++ b/python/ctsm/toolchain/gen_mksurfdata_namelist.py
@@ -166,16 +166,6 @@ def get_parser():
dest="input_path",
default="/glade/campaign/cesm/cesmdata/inputdata/",
)
- parser.add_argument(
- "--vic",
- help="""
- Flag for adding the fields required for the VIC model.
- [default: %(default)s]
- """,
- action="store_true",
- dest="vic_flag",
- default=False,
- )
parser.add_argument(
"--inlandwet",
help="""
@@ -196,19 +186,6 @@ def get_parser():
dest="glc_flag",
default=False,
)
- parser.add_argument(
- "--hires_pft",
- help="""
- If you want to use the high-resolution pft dataset rather
- than the default lower resolution dataset.
- (Low resolution is at quarter-degree, high resolution at 3-minute)
- [Note: hires only available for 1850 and 2005.]
- [default: %(default)s]
- """,
- action="store_true",
- dest="hres_pft",
- default=False,
- )
parser.add_argument(
"--hires_soitex",
help="""
@@ -273,13 +250,12 @@ def main():
input_path = args.input_path
nocrop_flag = args.crop_flag
nosurfdata_flag = args.surfdata_flag
- vic_flag = args.vic_flag
inlandwet = args.inlandwet
glc_flag = args.glc_flag
potveg = args.potveg_flag
glc_nec = args.glc_nec
- hires_pft, hires_soitex = process_hires_options(args, start_year, end_year)
+ hires_soitex = process_hires_options(args)
if force_model_mesh_file is not None:
open_mesh_file(force_model_mesh_file, force_model_mesh_nx, force_model_mesh_ny)
@@ -310,7 +286,6 @@ def main():
# create attribute list for parsing xml file
attribute_list = {
- "hires_pft": hires_pft,
"hires_soitex": hires_soitex,
"pft_years": pft_years,
"pft_years_ssp": pft_years_ssp,
@@ -400,7 +375,6 @@ def main():
force_model_mesh_file,
force_model_mesh_nx,
force_model_mesh_ny,
- vic_flag,
rawdata_files,
landuse_fname,
mksrf_ftopostats_override,
@@ -413,7 +387,6 @@ def main():
# -------------------
write_nml_outdata(
nosurfdata_flag,
- vic_flag,
inlandwet,
glc_flag,
hostname,
@@ -436,43 +409,31 @@ def main():
print(f"Successfully created input namelist file {nlfname}")
-def process_hires_options(args, start_year, end_year):
+def process_hires_options(args):
"""
Process options related to hi-res
"""
- if args.hres_pft:
- if (start_year == 1850 and end_year == 1850) or (start_year == 2005 and end_year == 2005):
- hires_pft = "on"
- else:
- error_msg = (
- "ERROR: for --hires_pft you must set both start-year "
- "and end-year to 1850 or to 2005"
- )
- sys.exit(error_msg)
- else:
- hires_pft = "off"
-
if args.hres_soitex:
hires_soitex = "on"
else:
hires_soitex = "off"
- return hires_pft, hires_soitex
+ return hires_soitex
def check_ssp_years(start_year, end_year):
"""
Check years associated with SSP period
"""
- if int(start_year) > 2015:
+ if int(start_year) > 2023:
error_msg = (
- "ERROR: if start-year > 2015 must add an --ssp_rcp "
+ "ERROR: if start-year > 2023 must add an --ssp_rcp "
"argument that is not none: valid opts for ssp-rcp "
f"are {valid_opts}"
)
sys.exit(error_msg)
- elif int(end_year) > 2015:
+ elif int(end_year) > 2023:
error_msg = (
- "ERROR: if end-year > 2015 must add an --ssp-rcp "
+ "ERROR: if end-year > 2023 must add an --ssp-rcp "
"argument that is not none: valid opts for ssp-rcp "
f"are {valid_opts}"
)
@@ -537,6 +498,8 @@ def determine_pft_years(start_year, end_year, potveg):
pft_years_ssp = "-999"
if potveg:
pft_years = "PtVg"
+ elif int(start_year) == 1700 and int(end_year) == 1700:
+ pft_years = "1700"
elif int(start_year) == 1850 and int(end_year) == 1850:
pft_years = "1850"
elif int(start_year) == 2000 and int(end_year) == 2000:
@@ -545,21 +508,21 @@ def determine_pft_years(start_year, end_year, potveg):
pft_years = "2005"
elif int(start_year) >= 850 and int(end_year) <= 1849:
pft_years = "0850-1849"
- elif int(start_year) >= 1850 and int(start_year) <= 2100 and int(end_year) <= 2015:
- pft_years = "1850-2015"
- elif int(start_year) >= 1850 and int(start_year) <= 2100 and int(end_year) <= 2100:
- pft_years = "1850-2015"
- pft_years_ssp = "2016-2100"
- elif int(start_year) >= 2016 and int(start_year) <= 2100 and int(end_year) <= 2100:
+ elif int(start_year) >= 1700 and int(start_year) <= 2100 and int(end_year) <= 2023:
+ pft_years = "1700-2023"
+ elif int(start_year) >= 1700 and int(start_year) <= 2100 and int(end_year) <= 2100:
+ pft_years = "1700-2023"
+ pft_years_ssp = "2024-2100"
+ elif int(start_year) >= 2023 and int(start_year) <= 2100 and int(end_year) <= 2100:
pft_years = "-999"
- pft_years_ssp = "2016-2100"
+ pft_years_ssp = "2024-2100"
else:
error_msg = (
f"ERROR: start_year is {start_year} and end_year is "
f"{end_year}; expected start/end-year options are: "
- "- 1850, 2000, 2005 for time-slice options "
+ "- 1700, 1850, 2000, 2005 for time-slice options "
"- in the range from 850 to 1849 "
- "- in the range from 1850 to 2100 "
+ "- in the range from 1700 to 2100 "
"- TODO in the range from 2101 to 2300 "
"- OR user must set the potveg_flag "
)
@@ -571,7 +534,6 @@ def determine_pft_years(start_year, end_year, potveg):
def write_nml_outdata(
nosurfdata_flag,
- vic_flag,
inlandwet,
glc_flag,
hostname,
@@ -602,7 +564,6 @@ def write_nml_outdata(
nlfile.write(f" numpft = {num_pft} \n")
nlfile.write(f" no_inlandwet = .{str(not inlandwet).lower()}. \n")
nlfile.write(f" outnc_3dglc = .{str(glc_flag).lower()}. \n")
- nlfile.write(f" outnc_vic = .{str(vic_flag).lower()}. \n")
nlfile.write(" outnc_large_files = .false. \n")
nlfile.write(" outnc_double = .true. \n")
nlfile.write(f" logname = '{logname}' \n")
@@ -615,7 +576,6 @@ def write_nml_rawinput(
force_model_mesh_file,
force_model_mesh_nx,
force_model_mesh_ny,
- vic_flag,
rawdata_files,
landuse_fname,
mksrf_ftopostats_override,
@@ -641,11 +601,11 @@ def write_nml_rawinput(
for key, value in rawdata_files.items():
if key == "mksrf_ftopostats" and mksrf_ftopostats_override != "":
nlfile.write(f" mksrf_ftopostats_override = '{mksrf_ftopostats_override}' \n")
- elif "_fvic" not in key and "mksrf_fvegtyp" not in key and "mksrf_fgrid" not in key:
+ elif "mksrf_fvegtyp" not in key and "mksrf_fgrid" not in key:
# write everything else
nlfile.write(f" {key} = '{value}' \n")
- if start_year <= 2015:
+ if start_year <= 2023:
mksrf_fvegtyp = rawdata_files["mksrf_fvegtyp"]
mksrf_fvegtyp_mesh = rawdata_files["mksrf_fvegtyp_mesh"]
mksrf_fhrvtyp = rawdata_files["mksrf_fvegtyp"]
@@ -690,12 +650,6 @@ def write_nml_rawinput(
nlfile.write(f" mksrf_fpctlak = '{mksrf_fpctlak}' \n")
nlfile.write(f" mksrf_furban = '{mksrf_furban}' \n")
- if vic_flag:
- mksrf_fvic = rawdata_files["mksrf_fvic"]
- nlfile.write(f" mksrf_fvic = '{mksrf_fvic}' \n")
- mksrf_fvic_mesh = rawdata_files["mksrf_fvic_mesh"]
- nlfile.write(f" mksrf_fvic_mesh = '{mksrf_fvic_mesh}' \n")
-
nlfile.write(f" mksrf_fdynuse = '{landuse_fname} ' \n")
return must_run_download_input_data
@@ -714,7 +668,7 @@ def handle_transient_run(
with open(landuse_fname, "w", encoding="utf-8") as landuse_file:
for year in range(start_year, end_year + 1):
year_str = str(year)
- if year <= 2015:
+ if year <= 2023:
file1 = rawdata_files["mksrf_fvegtyp"]
file2 = rawdata_files["mksrf_fvegtyp_urban"]
file3 = rawdata_files["mksrf_fvegtyp_lake"]
@@ -832,16 +786,15 @@ def determine_input_rawdata(start_year, input_path, attribute_list):
max_match_child = child2
if max_match_child is None:
- # TODO slevis: Are these if-statements backwards?
- # For years greater than 2015 - mksrf_fvegtyp_ssp must have a match
- if start_year <= 2015:
+ # For years greater than 2023 - mksrf_fvegtyp_ssp must have a match
+ if start_year > 2023:
if "mksrf_fvegtyp_ssp" not in child1.tag:
error_msg = f"ERROR: {child1.tag} has no matches"
sys.exit(error_msg)
else:
continue
else:
- # For years less than 2015 - mksrf_fvegtyp must have a match
+ # For years up to and including 2023 - mksrf_fvegtyp must have a match
if "mksrf_fvegtyp" not in child1.tag:
error_msg = f"ERROR: {child1.tag} has no matches"
sys.exit(error_msg)
diff --git a/src/biogeochem/FireEmisFactorsMod.F90 b/src/biogeochem/FireEmisFactorsMod.F90
index e97082c0b8..de4b8280ec 100644
--- a/src/biogeochem/FireEmisFactorsMod.F90
+++ b/src/biogeochem/FireEmisFactorsMod.F90
@@ -11,6 +11,8 @@ module FireEmisFactorsMod
use shr_kind_mod, only : r8 => shr_kind_r8
use abortutils, only : endrun
use clm_varctl, only : iulog
+ use clm_varpar, only : maxveg
+ use pftconMod, only : nc3crop
!
implicit none
private
@@ -21,7 +23,6 @@ module FireEmisFactorsMod
public :: fire_emis_factors_get
! !PRIVATE MEMBERS:
- integer :: npfts ! number of plant function types
!
type emis_eff_t
real(r8), pointer :: eff(:) ! emissions efficiency factor
@@ -52,7 +53,6 @@ subroutine fire_emis_factors_get( comp_name, factors, molecwght )
! Method for getting FireEmis information for a named compound
!
! !USES:
- use pftconMod , only : nc3crop
! !ARGUMENTS:
character(len=*),intent(in) :: comp_name ! FireEmis compound name
real(r8), intent(out) :: factors(:) ! vegetation type factors for the compound of interest
@@ -73,9 +73,11 @@ subroutine fire_emis_factors_get( comp_name, factors, molecwght )
call endrun(errmes)
endif
- factors(:npfts) = comp_factors_table( ndx )%eff(:npfts)
- if ( size(factors) > npfts )then
- factors(npfts+1:) = comp_factors_table( ndx )%eff(nc3crop)
+ factors(:maxveg) = comp_factors_table( ndx )%eff(:maxveg)
+ ! If the fire emissions factor file only includes natural PFTs but this is a crop case,
+ ! copy the factors for the generic crop type to the crop CFTs
+ if ( size(factors) > nc3crop )then
+ factors(nc3crop+1:) = comp_factors_table( ndx )%eff(nc3crop)
end if
molecwght = comp_factors_table( ndx )%wght
@@ -96,7 +98,7 @@ subroutine fire_emis_factors_init( filename )
use ncdio_pio, only : ncd_pio_openfile,ncd_inqdlen
use pio, only : pio_inq_varid,pio_get_var,file_desc_t,pio_closefile
use fileutils , only : getfil
- use clm_varpar , only : mxpft
+ use clm_varpar, only : mxpft
!
! !ARGUMENTS:
character(len=*),intent(in) :: filename ! FireEmis factors input file
@@ -126,16 +128,20 @@ subroutine fire_emis_factors_init( filename )
call ncd_inqdlen( ncid, dimid, n_comps, name='Comp_Num')
call ncd_inqdlen( ncid, dimid, n_pfts, name='PFT_Num')
- npfts = n_pfts
- if ( npfts /= mxpft .and. npfts /= 16 )then
- call endrun('Number of PFTs on fire emissions file is NOT correct. Its neither the total number of PFTS nor 16')
+ if ( (n_pfts < maxveg) .and. (n_pfts < nc3crop) )then
+ write(iulog,*) ' n_pfts = ', n_pfts, ' maxveg = ', maxveg, ' nat_pft = ', nc3crop
+ call endrun('Number of PFTs on the fire emissions file is less than the number of natural PFTs from the surface dataset')
+ end if
+ if ( n_pfts > mxpft )then
+ write(iulog,*) ' n_pfts = ', n_pfts, ' mxpft = ', mxpft
+ call endrun('Number of PFTs on the fire emissions file is more than the max number of PFTs from the surface dataset with crops')
end if
ierr = pio_inq_varid(ncid,'Comp_EF', comp_ef_vid)
ierr = pio_inq_varid(ncid,'Comp_Name',comp_name_vid)
ierr = pio_inq_varid(ncid,'Comp_MW', comp_mw_vid)
- allocate( comp_factors(n_pfts) )
+ allocate( comp_factors(maxveg) )
allocate( comp_names(n_comps) )
allocate( comp_molecwghts(n_comps) )
@@ -146,7 +152,7 @@ subroutine fire_emis_factors_init( filename )
call bld_hash_table_indices( comp_names )
do i=1,n_comps
start=(/i,1/)
- count=(/1,npfts/)
+ count=(/1,min(n_pfts,maxveg)/)
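+ ! Read at most maxveg factors per compound, even when the file provides more PFT columns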
ierr = pio_get_var( ncid, comp_ef_vid, start, count, comp_factors )
call enter_hash_data( trim(comp_names(i)), comp_factors, comp_molecwghts(i) )
diff --git a/src/biogeophys/SoilTemperatureMod.F90 b/src/biogeophys/SoilTemperatureMod.F90
index 0dc8876d24..d6c9660b96 100644
--- a/src/biogeophys/SoilTemperatureMod.F90
+++ b/src/biogeophys/SoilTemperatureMod.F90
@@ -671,9 +671,9 @@ subroutine SoilThermProp (bounds, num_urbanc, filter_urbanc, num_nolakec, filter
tk_wall => urbanparams_inst%tk_wall , & ! Input: [real(r8) (:,:) ] thermal conductivity of urban wall
tk_roof => urbanparams_inst%tk_roof , & ! Input: [real(r8) (:,:) ] thermal conductivity of urban roof
tk_improad => urbanparams_inst%tk_improad , & ! Input: [real(r8) (:,:) ] thermal conductivity of urban impervious road
- cv_wall => urbanparams_inst%cv_wall , & ! Input: [real(r8) (:,:) ] thermal conductivity of urban wall
- cv_roof => urbanparams_inst%cv_roof , & ! Input: [real(r8) (:,:) ] thermal conductivity of urban roof
- cv_improad => urbanparams_inst%cv_improad , & ! Input: [real(r8) (:,:) ] thermal conductivity of urban impervious road
+ cv_wall => urbanparams_inst%cv_wall , & ! Input: [real(r8) (:,:) ] heat capacity of urban wall
+ cv_roof => urbanparams_inst%cv_roof , & ! Input: [real(r8) (:,:) ] heat capacity of urban roof
+ cv_improad => urbanparams_inst%cv_improad , & ! Input: [real(r8) (:,:) ] heat capacity of urban impervious road
t_soisno => temperature_inst%t_soisno_col , & ! Input: [real(r8) (:,:) ] soil temperature [K]
diff --git a/src/main/clm_initializeMod.F90 b/src/main/clm_initializeMod.F90
index 340a799908..bf9a97ae58 100644
--- a/src/main/clm_initializeMod.F90
+++ b/src/main/clm_initializeMod.F90
@@ -58,7 +58,7 @@ subroutine initialize1(dtime)
use clm_varcon , only: clm_varcon_init
use landunit_varcon , only: landunit_varcon_init
use clm_varctl , only: fsurdat, version
- use surfrdMod , only: surfrd_get_num_patches, surfrd_get_nlevurb
+ use surfrdMod , only: surfrd_get_num_patches, surfrd_get_nlevurb, surfrd_compat_check
use controlMod , only: control_init, control_print, NLFilename
use ncdio_pio , only: ncd_pio_init
use initGridCellsMod , only: initGridCells
@@ -100,6 +100,7 @@ subroutine initialize1(dtime)
call control_init(dtime)
call ncd_pio_init()
+ call surfrd_compat_check(fsurdat)
call surfrd_get_num_patches(fsurdat, actual_maxsoil_patches, actual_numpft, actual_numcft)
call surfrd_get_nlevurb(fsurdat, actual_nlevurb)
diff --git a/src/main/glc2lndMod.F90 b/src/main/glc2lndMod.F90
index 2d0dbb5791..c2e6290300 100644
--- a/src/main/glc2lndMod.F90
+++ b/src/main/glc2lndMod.F90
@@ -35,7 +35,7 @@ module glc2lndMod
! Public data
! ------------------------------------------------------------------------
- ! Where we should do runoff routing that is appropriate for having a dynamic icesheet underneath.
+ ! Where we should do SMB-related runoff routing that is appropriate for having a dynamic icesheet underneath.
real(r8), pointer :: glc_dyn_runoff_routing_grc (:) => null()
! ------------------------------------------------------------------------
@@ -383,32 +383,32 @@ subroutine check_glc2lnd_icemask(this, bounds)
if (this%icemask_grc(g) > 0._r8) then
- ! Ensure that icemask is a subset of has_virtual_columns. This is needed because
- ! we allocated memory based on has_virtual_columns, so it is a problem if the
- ! ice sheet tries to expand beyond the area defined by has_virtual_columns.
- if (.not. this%glc_behavior%has_virtual_columns_grc(g)) then
- write(iulog,'(a)') subname//' ERROR: icemask must be a subset of has_virtual_columns.'
- write(iulog,'(a)') 'Ensure that the glacier_region_behavior namelist item is set correctly.'
- write(iulog,'(a)') '(It should specify "virtual" for the region corresponding to the GLC domain.)'
- write(iulog,'(a)') 'If glacier_region_behavior is set correctly, then you can fix this problem'
- write(iulog,'(a)') 'by modifying GLACIER_REGION on the surface dataset.'
- write(iulog,'(a)') '(Expand the region that corresponds to the GLC domain'
- write(iulog,'(a)') '- i.e., the region specified as "virtual" in glacier_region_behavior.)'
- call endrun(subgrid_index=g, subgrid_level=subgrid_level_gridcell, msg=errMsg(sourcefile, __LINE__))
- end if
-
- ! Ensure that icemask is a subset of melt_replaced_by_ice. This is needed
- ! because we only compute SMB in the region given by melt_replaced_by_ice
- ! (according to the logic for building the do_smb filter), and we need SMB
- ! everywhere inside the icemask.
- if (.not. this%glc_behavior%melt_replaced_by_ice_grc(g)) then
- write(iulog,'(a)') subname//' ERROR: icemask must be a subset of melt_replaced_by_ice.'
- write(iulog,'(a)') 'Ensure that the glacier_region_melt_behavior namelist item is set correctly.'
- write(iulog,'(a)') '(It should specify "replaced_by_ice" for the region corresponding to the GLC domain.)'
- write(iulog,'(a)') 'If glacier_region_behavior is set correctly, then you can fix this problem'
- write(iulog,'(a)') 'by modifying GLACIER_REGION on the surface dataset.'
- write(iulog,'(a)') '(Expand the region that corresponds to the GLC domain'
- write(iulog,'(a)') '- i.e., the region specified as "replaced_by_ice" in glacier_region_melt_behavior.)'
+ ! Ensure that, within the icemask, there are no points that have (non-virtual
+ ! and compute-SMB). This is important for two reasons:
+ !
+ ! (1) To ensure that, in grid cells where we're producing SMB, we have SMB for
+ ! all elevation classes, so that the downscaling / vertical interpolation
+ ! can be done correctly.
+ !
+ ! (2) To avoid conservation issues, we want to ensure that, in grid cells where
+ ! we're producing SMB and are dynamically coupled to the ice sheet (if 2-way
+ ! coupling is enabled), glacier areas are remaining in-sync with glc. (Note
+ ! that has_virtual_columns_grc dictates where we're able to keep glacier
+ ! areas in sync with glc.) (In principle, I think this one could check
+ ! icemask_coupled_fluxes rather than icemask; we check icemask because we
+ ! needed to check icemask for the other reason anyway; this is okay because
+ ! icemask_coupled_fluxes is a subset of icemask.)
+ if (this%glc_behavior%melt_replaced_by_ice_grc(g) .and. &
+ .not. this%glc_behavior%has_virtual_columns_grc(g)) then
+ write(iulog,'(a)') subname//' ERROR: Within the icemask, there cannot be any points that have'
+ write(iulog,'(a)') '(non-virtual and compute-SMB).'
+ write(iulog,'(a)') 'Ensure that GLACIER_REGION on the surface dataset and the namelist items,'
+ write(iulog,'(a)') 'glacier_region_behavior and glacier_region_melt_behavior are all set correctly:'
+ write(iulog,'(a)') 'Typically, the region encompassing the active GLC domain should specify'
+ write(iulog,'(a)') 'glacier_region_behavior="virtual" and glacier_region_melt_behavior="replaced_by_ice".'
+ write(iulog,'(a)') '(But it is also okay for part of the GLC domain to have'
+ write(iulog,'(a)') 'glacier_region_melt_behavior="remains_in_place"; this part of the domain can have'
+ write(iulog,'(a)') 'any setting for glacier_region_behavior.)'
call endrun(subgrid_index=g, subgrid_level=subgrid_level_gridcell, msg=errMsg(sourcefile, __LINE__))
end if
@@ -437,10 +437,12 @@ subroutine check_glc2lnd_icemask_coupled_fluxes(this, bounds)
do g = bounds%begg, bounds%endg
- ! Ensure that icemask_coupled_fluxes is a subset of icemask. Although there
- ! currently is no code in CLM that depends on this relationship, it seems helpful
- ! to ensure that this intuitive relationship holds, so that code developed in the
- ! future can rely on it.
+ ! Ensure that icemask_coupled_fluxes is a subset of icemask. This is helpful to
+ ! ensure that the consistency checks that are done on glc behaviors within the
+ ! icemask (in check_glc2lnd_icemask) also apply within the icemask_coupled_fluxes
+ ! region. Other than that convenience, there currently is no code in CLM that
+ ! depends on this relationship, but it seems helpful to ensure that this intuitive
+ ! relationship holds, so that code developed in the future can rely on it.
if (this%icemask_coupled_fluxes_grc(g) > 0._r8 .and. this%icemask_grc(g) == 0._r8) then
write(iulog,*) subname//' ERROR: icemask_coupled_fluxes must be a subset of icemask.'
call endrun(subgrid_index=g, subgrid_level=subgrid_level_gridcell, msg=errMsg(sourcefile, __LINE__))
@@ -477,70 +479,73 @@ subroutine update_glc2lnd_dyn_runoff_routing(this, bounds)
! where CISM is running in diagnostic-only mode and therefore is not sending a calving flux -
! we have glc_dyn_runoff_routing = 0, and the snowcap flux goes to the runoff model.
! This is needed to conserve water correctly in the absence of a calving flux.
+ !
+ ! In places where we are not computing SMB, we also have glc_dyn_runoff_routing = 0.
+ ! Currently glc_dyn_runoff_routing is only used where we're computing SMB, but if it
+ ! were ever used elsewhere, it seems best to have it set to 0 there: this seems
+ ! consistent with the fact that we zero out the SMB flux sent to GLC in that region.
+ ! (However, it's possible that, once we start actually using glc_dyn_runoff_routing
+ ! for some purpose outside the do_smb filter, we'll discover that this logic should
+ ! be changed.)
do g = bounds%begg, bounds%endg
- ! Set glc_dyn_runoff_routing_grc(g) to a value in the range [0,1].
- !
- ! This value gives the grid cell fraction that is deemed to be coupled to the
- ! dynamic ice sheet model. For this fraction of the grid cell, snowcap fluxes are
- ! sent to the ice sheet model. The remainder of the grid cell sends snowcap fluxes
- ! to the runoff model.
- !
- ! Note: The coupler (in prep_glc_mod.F90) assumes that the fraction coupled to the
- ! dynamic ice sheet model is min(lfrac, Sg_icemask_l), where lfrac is the
- ! "frac" component of fraction_lx, and Sg_icemask_l is obtained by mapping
- ! Sg_icemask_g from the glc to the land grid. Here, ldomain%frac is
- ! equivalent to lfrac, and this%icemask_grc is equivalent to Sg_icemask_l.
- ! However, here we use icemask_coupled_fluxes_grc, so that we route all snow
- ! capping to runoff in areas where the ice sheet is not generating calving
- ! fluxes. In addition, here we need to divide by lfrac, because the coupler
- ! multiplies by it later (and, for example, if lfrac = 0.1 and
- ! icemask_coupled_fluxes = 1, we want all snow capping to go to the ice
- ! sheet model, not to the runoff model).
- !
- ! Note: In regions where CLM overlaps the CISM domain, this%icemask_grc(g) typically
- ! is nearly equal to ldomain%frac(g). So an alternative would be to simply set
- ! glc_dyn_runoff_routing_grc(g) = icemask_grc(g).
- ! The reason to cap glc_dyn_runoff_routing at lfrac is to avoid sending the
- ! ice sheet model a greater mass of water (in the form of snowcap fluxes)
- ! than is allowed to fall on a CLM grid cell that is part ocean.
-
- ! TODO(wjs, 2017-05-08) Ideally, we wouldn't have this duplication in logic
- ! between the coupler and CLM. The best solution would be to have the coupler
- ! itself do the partitioning of the snow capping flux between the ice sheet model
- ! and the runoff model. A next-best solution would be to have the coupler send a
- ! field to CLM telling it what fraction of snow capping should go to the runoff
- ! model in each grid cell.
-
- if (ldomain%frac(g) == 0._r8) then
- ! Avoid divide by 0; note that, in this case, the amount going to runoff isn't
- ! important for system-wide conservation, so we could really choose anything we
- ! want.
- this%glc_dyn_runoff_routing_grc(g) = this%icemask_coupled_fluxes_grc(g)
- else
- this%glc_dyn_runoff_routing_grc(g) = &
- min(ldomain%frac(g), this%icemask_coupled_fluxes_grc(g)) / &
- ldomain%frac(g)
- end if
-
- if (this%glc_dyn_runoff_routing_grc(g) > 0.0_r8) then
-
- ! Ensure that glc_dyn_runoff_routing is a subset of melt_replaced_by_ice. This
- ! is needed because glacial melt is only sent to the runoff stream in the region
- ! given by melt_replaced_by_ice (because the latter is used to create the do_smb
- ! filter, and the do_smb filter controls where glacial melt is computed).
- if (.not. this%glc_behavior%melt_replaced_by_ice_grc(g)) then
- write(iulog,'(a)') subname//' ERROR: icemask_coupled_fluxes must be a subset of melt_replaced_by_ice.'
- write(iulog,'(a)') 'Ensure that the glacier_region_melt_behavior namelist item is set correctly.'
- write(iulog,'(a)') '(It should specify "replaced_by_ice" for the region corresponding to the GLC domain.)'
- write(iulog,'(a)') 'If glacier_region_behavior is set correctly, then you can fix this problem'
- write(iulog,'(a)') 'by modifying GLACIER_REGION on the surface dataset.'
- write(iulog,'(a)') '(Expand the region that corresponds to the GLC domain'
- write(iulog,'(a)') '- i.e., the region specified as "replaced_by_ice" in glacier_region_melt_behavior.)'
- call endrun(subgrid_index=g, subgrid_level=subgrid_level_gridcell, msg=errMsg(sourcefile, __LINE__))
+ if (this%glc_behavior%melt_replaced_by_ice_grc(g)) then
+ ! As noted in the comments at the top of this routine, we only set
+ ! glc_dyn_runoff_routing where we are computing SMB
+
+ ! Set glc_dyn_runoff_routing_grc(g) to a value in the range [0,1].
+ !
+ ! This value gives the grid cell fraction that is deemed to be coupled to the
+ ! dynamic ice sheet model. For this fraction of the grid cell, snowcap fluxes are
+ ! sent to the ice sheet model. The remainder of the grid cell sends snowcap fluxes
+ ! to the runoff model.
+ !
+ ! Note: The coupler (in prep_glc_mod.F90) assumes that the fraction coupled to the
+ ! dynamic ice sheet model is min(lfrac, Sg_icemask_l), where lfrac is the
+ ! "frac" component of fraction_lx, and Sg_icemask_l is obtained by mapping
+ ! Sg_icemask_g from the glc to the land grid. Here, ldomain%frac is
+ ! equivalent to lfrac, and this%icemask_grc is equivalent to Sg_icemask_l.
+ ! However, here we use icemask_coupled_fluxes_grc, so that we route all snow
+ ! capping to runoff in areas where the ice sheet is not generating calving
+ ! fluxes. In addition, here we need to divide by lfrac, because the coupler
+ ! multiplies by it later (and, for example, if lfrac = 0.1 and
+ ! icemask_coupled_fluxes = 1, we want all snow capping to go to the ice
+ ! sheet model, not to the runoff model).
+ !
+ ! Note: In regions where CLM overlaps the CISM domain, this%icemask_grc(g) typically
+ ! is nearly equal to ldomain%frac(g). So an alternative would be to simply set
+ ! glc_dyn_runoff_routing_grc(g) = icemask_grc(g).
+ ! The reason to cap glc_dyn_runoff_routing at lfrac is to avoid sending the
+ ! ice sheet model a greater mass of water (in the form of snowcap fluxes)
+ ! than is allowed to fall on a CLM grid cell that is part ocean.
+
+ ! TODO(wjs, 2017-05-08) Ideally, we wouldn't have this duplication in logic
+ ! between the coupler and CLM. The best solution would be to have the coupler
+ ! itself do the partitioning of the snow capping flux between the ice sheet model
+ ! and the runoff model. A next-best solution would be to have the coupler send a
+ ! field to CLM telling it what fraction of snow capping should go to the runoff
+ ! model in each grid cell.
+
+ if (ldomain%frac(g) == 0._r8) then
+ ! Avoid divide by 0; note that, in this case, the amount going to runoff isn't
+ ! important for system-wide conservation, so we could really choose anything we
+ ! want.
+ this%glc_dyn_runoff_routing_grc(g) = this%icemask_coupled_fluxes_grc(g)
+ else
+ this%glc_dyn_runoff_routing_grc(g) = &
+ min(ldomain%frac(g), this%icemask_coupled_fluxes_grc(g)) / &
+ ldomain%frac(g)
end if
+
+ else ! .not. this%glc_behavior%melt_replaced_by_ice_grc(g)
+ ! As noted in the comments at the top of this routine, we set
+ ! glc_dyn_runoff_routing to 0 where we are not computing SMB. (This assumes that
+ ! gridcells where we compute SMB are the same as gridcells for which
+ ! melt_replaced_by_ice is true.)
+ this%glc_dyn_runoff_routing_grc(g) = 0._r8
end if
+
end do
end subroutine update_glc2lnd_dyn_runoff_routing
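As an aside, the partitioning arithmetic described in the comments above can be checked in isolation. The following standalone sketch is illustrative only (the program and its variable names are not CTSM code) and reproduces the lfrac = 0.1 example from the comments:

    program routing_demo
       implicit none
       integer, parameter :: r8 = selected_real_kind(12)
       real(r8) :: lfrac, icemask_coupled_fluxes, routing

       ! Example from the comments above: lfrac = 0.1 and
       ! icemask_coupled_fluxes = 1 should send ALL snow capping to the
       ! ice sheet model, i.e., routing = 1.
       lfrac = 0.1_r8
       icemask_coupled_fluxes = 1._r8

       if (lfrac == 0._r8) then
          ! Avoid divide by zero; the choice here doesn't affect conservation
          routing = icemask_coupled_fluxes
       else
          routing = min(lfrac, icemask_coupled_fluxes) / lfrac
       end if

       write(*,*) 'glc_dyn_runoff_routing = ', routing  ! prints 1.0
    end program routing_demo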
@@ -578,8 +583,15 @@ subroutine update_glc2lnd_fracs(this, bounds)
if (glc_do_dynglacier) then
do g = bounds%begg, bounds%endg
- ! Values from GLC are only valid within the icemask, so we only update CLM's areas there
- if (this%icemask_grc(g) > 0._r8) then
+ ! Values from GLC are only valid within the icemask, so we only update CLM's
+ ! areas there. Also, we only update areas where the glacier region behavior is
+ ! 'virtual', because that's the only region where we are guaranteed to have all
+ ! of the elevation classes we need in order to remain in sync. (Note that, for
+ ! conservation purposes, it's important that we update areas in all regions
+ ! where we're fully-two-way-coupled to the icesheet and we're computing SMB;
+ ! this requirement is checked in check_glc2lnd_icemask.) (This conditional
+ ! should be kept consistent with the conditional in update_glc2lnd_topo.)
+ if (this%icemask_grc(g) > 0._r8 .and. this%glc_behavior%has_virtual_columns_grc(g)) then
! Set total ice landunit area
area_ice = sum(this%frac_grc(g, 1:maxpatch_glc))
@@ -623,7 +635,7 @@ subroutine update_glc2lnd_fracs(this, bounds)
msg="at least one glc column has non-zero area from cpl but has no slot in memory")
end if ! error
end if ! area_ice > 0
- end if ! this%icemask_grc(g) > 0
+ end if ! this%icemask_grc(g) > 0 .and. this%glc_behavior%has_virtual_columns_grc(g)
end do ! g
end if ! glc_do_dynglacier
@@ -667,8 +679,12 @@ subroutine update_glc2lnd_topo(this, bounds, topo_col, needs_downscaling_col)
l = col%landunit(c)
g = col%gridcell(c)
- ! Values from GLC are only valid within the icemask, so we only update CLM's topo values there
- if (this%icemask_grc(g) > 0._r8) then
+ ! Values from GLC are only valid within the icemask, so we only update CLM's
+ ! topo values there. Also, consistently with the conditional in
+ ! update_glc2lnd_fracs, we only update topo values where the glacier region
+ ! behavior is 'virtual': it could be problematic to update topo values in a
+ ! grid cell where we're not updating areas.
+ if (this%icemask_grc(g) > 0._r8 .and. this%glc_behavior%has_virtual_columns_grc(g)) then
if (lun%itype(l) == istice) then
ice_class = col_itype_to_ice_class(col%itype(c))
else
diff --git a/src/main/glcBehaviorMod.F90 b/src/main/glcBehaviorMod.F90
index 0476fe3ecb..8076f8fcaf 100644
--- a/src/main/glcBehaviorMod.F90
+++ b/src/main/glcBehaviorMod.F90
@@ -72,9 +72,10 @@ module glcBehaviorMod
logical, allocatable, public :: allow_multiple_columns_grc(:)
! If melt_replaced_by_ice_grc(g) is true, then any glacier ice melt in gridcell g
- ! runs off and is replaced by ice. Note that SMB cannot be computed in gridcell g if
- ! melt_replaced_by_ice_grc(g) is false, since we can't compute a sensible negative
- ! smb in that case.
+ ! runs off and is replaced by ice. This flag is also used to determine where we
+ ! compute SMB: We compute SMB in all grid cells for which melt_replaced_by_ice_grc is
+ ! true. (SMB cannot be computed in gridcells where melt_replaced_by_ice_grc is false,
+ ! since we can't compute a sensible negative SMB in that case.)
logical, allocatable, public :: melt_replaced_by_ice_grc(:)
! If ice_runoff_melted_grc(g) is true, then ice runoff generated by the
@@ -310,6 +311,7 @@ subroutine InitFromInputs(this, begg, endg, &
call translate_glacier_region_behavior
call translate_glacier_region_melt_behavior
call translate_glacier_region_ice_runoff_behavior
+ call check_behaviors
call this%InitAllocate(begg, endg)
@@ -405,7 +407,7 @@ subroutine translate_glacier_region_behavior
glacier_region_behavior(i) = BEHAVIOR_SINGLE_AT_ATM_TOPO
case (behavior_str_unset)
write(iulog,*) ' ERROR: glacier_region_behavior not specified for ID ', i
- write(iulog,*) 'You probably need to extend the glacier_region_behavior namelist array'
+ write(iulog,*) 'You may need to extend the glacier_region_behavior namelist array.'
call endrun(msg=' ERROR: glacier_region_behavior not specified for ID '// &
errMsg(sourcefile, __LINE__))
case default
@@ -436,7 +438,7 @@ subroutine translate_glacier_region_melt_behavior
glacier_region_melt_behavior(i) = MELT_BEHAVIOR_REMAINS_IN_PLACE
case (behavior_str_unset)
write(iulog,*) ' ERROR: glacier_region_melt_behavior not specified for ID ', i
- write(iulog,*) 'You probably need to extend the glacier_region_melt_behavior namelist array'
+ write(iulog,*) 'You may need to extend the glacier_region_melt_behavior namelist array.'
call endrun(msg=' ERROR: glacier_region_melt_behavior not specified for ID '// &
errMsg(sourcefile, __LINE__))
case default
@@ -467,7 +469,7 @@ subroutine translate_glacier_region_ice_runoff_behavior
glacier_region_ice_runoff_behavior(i) = ICE_RUNOFF_BEHAVIOR_MELTED
case (behavior_str_unset)
write(iulog,*) ' ERROR: glacier_region_ice_runoff_behavior not specified for ID ', i
- write(iulog,*) 'You probably need to extend the glacier_region_ice_runoff_behavior namelist array'
+ write(iulog,*) 'You may need to extend the glacier_region_ice_runoff_behavior namelist array.'
call endrun(msg=' ERROR: glacier_region_ice_runoff_behavior not specified for ID '// &
errMsg(sourcefile, __LINE__))
case default
@@ -481,7 +483,28 @@ subroutine translate_glacier_region_ice_runoff_behavior
end do
end subroutine translate_glacier_region_ice_runoff_behavior
- end subroutine InitFromInputs
+ subroutine check_behaviors
+ ! Check the various behaviors for validity / consistency
+ integer :: i
+
+ do i = min_glacier_region_id, max_glacier_region_id
+ if (glacier_region_melt_behavior(i) == MELT_BEHAVIOR_REPLACED_BY_ICE .and. &
+ glacier_region_ice_runoff_behavior(i) == ICE_RUNOFF_BEHAVIOR_MELTED) then
+ write(iulog,*) ' ERROR: Bad glacier region behavior combination for ID ', i
+ write(iulog,*) 'You cannot combine glacier_region_melt_behavior = "replaced_by_ice"'
+ write(iulog,*) 'with glacier_region_ice_runoff_behavior = "melted".'
+ write(iulog,*) 'While there is nothing fundamentally wrong with this combination,'
+ write(iulog,*) 'it can result in problematic, non-physical fluxes (particularly,'
+ write(iulog,*) 'a large positive sensible heat flux during glacial melt in regions'
+ write(iulog,*) 'where the icesheet is not fully dynamic and two-way-coupled;'
+ write(iulog,*) 'see https://github.com/ESCOMP/ctsm/issues/423 for details).'
+ call endrun(msg=' ERROR: Bad glacier region behavior combination '// &
+ errMsg(sourcefile, __LINE__))
+ end if
+ end do
+ end subroutine check_behaviors
+
+ end subroutine InitFromInputs
!-----------------------------------------------------------------------
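For illustration, a namelist fragment like the following would now abort in check_behaviors, since its second region pairs glacier_region_melt_behavior = 'replaced_by_ice' with glacier_region_ice_runoff_behavior = 'melted'. This is only a sketch: the namelist group name and the 'remains_ice' value are assumptions, not taken from this changeset.

    &clm_inparm
      glacier_region_behavior            = 'single_at_atm_topo', 'virtual'
      glacier_region_melt_behavior       = 'remains_in_place',   'replaced_by_ice'
      glacier_region_ice_runoff_behavior = 'remains_ice',        'melted'
    /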
diff --git a/src/main/lnd2glcMod.F90 b/src/main/lnd2glcMod.F90
index 27fa7639d7..26359ff261 100644
--- a/src/main/lnd2glcMod.F90
+++ b/src/main/lnd2glcMod.F90
@@ -169,10 +169,28 @@ subroutine update_lnd2glc(this, bounds, num_do_smb_c, filter_do_smb_c, &
character(len=*), parameter :: subname = 'update_lnd2glc'
!------------------------------------------------------------------------------
- ! Initialize to reasonable defaults
+ ! Initialize to reasonable defaults. These values will be sent for gridcells /
+ ! columns outside the do_smb filter.
+ ! NOTE(wjs, 2018-07-03) qice should be 0 outside the do_smb filter to ensure conservation
this%qice_grc(bounds%begg : bounds%endg, :) = 0._r8
+
+ ! NOTE(wjs, 2018-07-03) tsrf can be anything outside the do_smb filter; 0 deg C seems
+ ! as reasonable as anything (based on input from Bill Lipscomb and Gunter Leguy)
this%tsrf_grc(bounds%begg : bounds%endg, :) = tfrz
+
+ ! NOTE(wjs, 2018-07-03) The topo values outside the do_smb filter could matter for
+ ! gridcells where we compute SMB for some but not all elevation classes (possibly
+ ! because we haven't even allocated memory for some elevation classes - i.e., if we're
+ ! not using the 'virtual' behavior in that gridcell). In glc2lndMod, we ensure that
+ ! this cannot occur for gridcells within the icemask (i.e., within the icemask, we
+ ! ensure that there are no points that have (non-virtual and compute-SMB)), so this
+ ! isn't a conservation issue, but it could still be important, e.g., for generating
+ ! appropriate forcings for a later dlnd-driven T compset. I'm not sure what is "right"
+ ! here. We've historically used 0 for this, and maybe that's as good as anything,
+ ! because it may lead to the 0 SMB values being ignored for the sake of vertical
+ ! interpolation; alternatively, it might be better to use topo at the center of
+ ! each elevation class.
this%topo_grc(bounds%begg : bounds%endg, :) = 0._r8
! Fill the lnd->glc data on the clm grid
diff --git a/src/main/surfrdMod.F90 b/src/main/surfrdMod.F90
index 12212e2160..88d43a09cc 100644
--- a/src/main/surfrdMod.F90
+++ b/src/main/surfrdMod.F90
@@ -7,7 +7,7 @@ module surfrdMod
!
! !USES:
#include "shr_assert.h"
- use shr_kind_mod , only : r8 => shr_kind_r8
+ use shr_kind_mod , only : r8 => shr_kind_r8, r4 => shr_kind_r4
use shr_log_mod , only : errMsg => shr_log_errMsg
use abortutils , only : endrun
use clm_varpar , only : nlevsoifl
@@ -27,6 +27,7 @@ module surfrdMod
save
!
! !PUBLIC MEMBER FUNCTIONS:
+ public :: surfrd_compat_check ! Check that this surface dataset is compatible
public :: surfrd_get_data ! Read surface dataset and determine subgrid weights
public :: surfrd_get_num_patches ! Read surface dataset to determine maxsoil_patches and numcft
public :: surfrd_get_nlevurb ! Read surface dataset to determine nlevurb
@@ -45,6 +46,70 @@ module surfrdMod
contains
+ !-----------------------------------------------------------------------
+ subroutine surfrd_compat_check ( lfsurdat )
+ !
+ ! !DESCRIPTION:
+ ! Check compatibility of this surface dataset and abort with an error if it's incompatible
+ !
+ ! !USES:
+ use ncdio_pio, only : check_att
+ ! !ARGUMENTS:
+ character(len=*), intent(in) :: lfsurdat ! surface dataset filename
+ ! !LOCAL VARIABLES:
+ type(file_desc_t) :: ncid ! netcdf id
+ logical :: exists ! Whether the attribute or variable was found on the file
+ integer :: status ! Status return code
+ real(r4) :: version ! Version number on the dataset
+ ! NOTE: Only increment the expected_version when surface datasets are incompatible with the previous version.
+ ! If datasets just contain updated data and are backwards compatible, leave the expected_version alone.
+ real(r4), parameter :: expected_version = 5.3_r4
+ character(len=50) :: description
+ character(len=*), parameter :: version_name = 'Dataset_Version'
+
+ call ncd_pio_openfile (ncid, trim(lfsurdat), 0)
+ call check_att(ncid, pio_global, version_name, exists)
+ if (exists) then
+ status = pio_get_att(ncid, pio_global, version_name, version)
+ else
+ ! For a few previous versions, guess the compatibility version based on the existence of certain variables
+ call check_var( ncid, 'PCT_OCEAN', exists)
+ if (exists) then
+ version = 5.2_r4
+ else
+ call check_var( ncid, 'CONST_HARVEST_SH1', exists)
+ if (exists) then
+ version = 5.0_r4
+ else
+ call check_var( ncid, 'GLACIER_REGION', exists)
+ if (exists) then
+ version = 4.5_r4
+ else
+ ! This is a version before the main clm4_5 datasets, so mark it as 0 for unknown
+ version = 0.0_r4
+ end if
+ end if
+ end if
+ end if
+ call ncd_pio_closefile(ncid)
+ if ( (version /= expected_version) )then
+ if ( version < expected_version )then
+ description = 'older'
+ if ( version == 0.0_r4 ) description = trim(description)//' than 4.5'
+ else if ( version > expected_version )then
+ description = 'newer'
+ end if
+ if ( masterproc )then
+ write(iulog,*) 'Input surface dataset is: ', trim(lfsurdat)
+ write(iulog,'(3a)') 'This surface dataset is ', trim(description), ' and incompatible with this version of CTSM'
+ write(iulog,'(a,f3.1,a,f3.1)') 'Dataset version = ', version, ' Version expected = ', expected_version
+ write(iulog,*) errMsg(sourcefile, __LINE__)
+ end if
+ call endrun(msg="ERROR: Incompatible surface dataset")
+ end if
+
+ end subroutine surfrd_compat_check
+
!-----------------------------------------------------------------------
subroutine surfrd_get_data (begg, endg, ldomain, lfsurdat, actual_numcft)
!
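One consequence of surfrd_compat_check is that newly generated surface datasets must carry a global Dataset_Version attribute matching expected_version. As a sketch of how a dataset could be stamped outside CTSM, here using the plain netCDF Fortran API rather than CTSM's PIO wrappers (the filename is a placeholder and the whole program is illustrative):

    program stamp_dataset_version
       use netcdf
       implicit none
       integer, parameter :: r4 = selected_real_kind(6)
       integer :: ncid, status

       ! 'surfdata.nc' is a placeholder for an actual surface dataset path
       status = nf90_open('surfdata.nc', NF90_WRITE, ncid)
       if (status /= NF90_NOERR) stop 'error opening file'

       status = nf90_redef(ncid)   ! re-enter define mode to add an attribute
       status = nf90_put_att(ncid, NF90_GLOBAL, 'Dataset_Version', 5.3_r4)
       status = nf90_enddef(ncid)
       status = nf90_close(ncid)
    end program stamp_dataset_version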
diff --git a/src/main/test/glcBehavior_test/test_glcBehavior.pf b/src/main/test/glcBehavior_test/test_glcBehavior.pf
index ff104458b1..37271d2f98 100644
--- a/src/main/test/glcBehavior_test/test_glcBehavior.pf
+++ b/src/main/test/glcBehavior_test/test_glcBehavior.pf
@@ -170,7 +170,7 @@ contains
call glc_behavior%InitFromInputs(bounds%begg, bounds%endg, &
glacier_region_map = [0], &
glacier_region_behavior_str = ['single_at_atm_topo'], &
- glacier_region_melt_behavior_str = ['replaced_by_ice'], &
+ glacier_region_melt_behavior_str = ['remains_in_place'], &
glacier_region_ice_runoff_behavior_str = ['melted'])
@assertTrue(glc_behavior%ice_runoff_melted_grc(bounds%begg))
diff --git a/src/main/test/topo_test/test_topo.pf b/src/main/test/topo_test/test_topo.pf
index 196ce34763..82c3b2cb90 100644
--- a/src/main/test/topo_test/test_topo.pf
+++ b/src/main/test/topo_test/test_topo.pf
@@ -251,13 +251,33 @@ contains
expected_filter = col_filter_empty(bounds)
call this%topo%Init(bounds)
- ! Need icemask 0, because we can't have single_at_atm_topo inside the icemask
- call this%do_UpdateTopo(glc_behavior, icemask_grc = grc_array(0._r8))
+ call this%do_UpdateTopo(glc_behavior, icemask_grc = grc_array(1._r8))
filter = this%topo%DownscaleFilterc(bounds)
@assertTrue(filter == expected_filter)
end subroutine downscaleFilter_afterUpdate_doesNotContain_singleAtAtmTopo
+ @Test
+ subroutine downscaleFilter_afterUpdate_contains_vegInsideIcemaskAndVirtual(this)
+ ! We expect the downscaleFilter to contain vegetated points if they are both (1)
+ ! inside the icemask, and (2) in the 'virtual' region - because topo is updated in
+ ! that region.
+ class(TestTopo), intent(inout) :: this
+ type(glc_behavior_type) :: glc_behavior
+ type(filter_col_type) :: filter
+ type(filter_col_type) :: expected_filter
+
+ call setup_single_veg_patch(pft_type = 1)
+ glc_behavior = create_glc_behavior_all_virtual()
+ expected_filter = col_filter_from_index_array(bounds, [bounds%begc])
+
+ call this%topo%Init(bounds)
+ call this%do_UpdateTopo(glc_behavior, icemask_grc = grc_array(1._r8))
+ filter = this%topo%DownscaleFilterc(bounds)
+
+ @assertTrue(filter == expected_filter)
+ end subroutine downscaleFilter_afterUpdate_contains_vegInsideIcemaskAndVirtual
+
@Test
subroutine downscaleFilter_afterUpdate_doesNotContain_vegOutsideIcemask(this)
class(TestTopo), intent(inout) :: this
@@ -266,8 +286,6 @@ contains
type(filter_col_type) :: expected_filter
call setup_single_veg_patch(pft_type = 1)
- ! Use 'virtual' behavior, to make sure that we're not accidentally trying to
- ! downscale vegetation over virtual columns.
glc_behavior = create_glc_behavior_all_virtual()
expected_filter = col_filter_empty(bounds)
@@ -279,7 +297,10 @@ contains
end subroutine downscaleFilter_afterUpdate_doesNotContain_vegOutsideIcemask
@Test
- subroutine downscaleFilter_afterUpdate_contains_vegInsideIcemask(this)
+ subroutine downscaleFilter_afterUpdate_doesNotContain_vegNonVirtual(this)
+ ! Since topo is only updated in the 'virtual' region, we expect the downscale filter
+ ! to NOT include vegetated points outside the 'virtual' region, because topo
+ ! shouldn't be updated for those vegetated points.
class(TestTopo), intent(inout) :: this
type(glc_behavior_type) :: glc_behavior
type(filter_col_type) :: filter
@@ -287,14 +308,36 @@ contains
call setup_single_veg_patch(pft_type = 1)
glc_behavior = create_glc_behavior_all_multiple()
- expected_filter = col_filter_from_index_array(bounds, [bounds%begc])
+ expected_filter = col_filter_empty(bounds)
call this%topo%Init(bounds)
call this%do_UpdateTopo(glc_behavior, icemask_grc = grc_array(1._r8))
filter = this%topo%DownscaleFilterc(bounds)
@assertTrue(filter == expected_filter)
- end subroutine downscaleFilter_afterUpdate_contains_vegInsideIcemask
+ end subroutine downscaleFilter_afterUpdate_doesNotContain_vegNonVirtual
+
+ @Test
+ subroutine topo_changes_for_glcmecInsideIcemaskAndVirtual(this)
+ class(TestTopo), intent(inout) :: this
+ type(glc_behavior_type) :: glc_behavior
+ real(r8), parameter :: topo_orig = 7._r8
+ real(r8), parameter :: atm_topo = 23._r8
+
+ ! our column should get set to this:
+ real(r8), parameter :: glc_topo = 27._r8
+
+ call setup_single_ice_column(elev_class = 1)
+ glc_behavior = create_glc_behavior_all_virtual()
+ topo_glc_mec(:,:) = topo_orig
+
+ call this%topo%Init(bounds)
+ call this%do_UpdateTopo(glc_behavior, icemask_grc = grc_array(1._r8), &
+ atm_topo_grc = grc_array(atm_topo), &
+ glc_topo = glc_topo)
+
+ @assertEqual(glc_topo, this%topo%topo_col(bounds%begc))
+ end subroutine topo_changes_for_glcmecInsideIcemaskAndVirtual
@Test
subroutine topo_noChange_for_glcmecOutsideIcemask(this)
@@ -319,17 +362,17 @@ contains
end subroutine topo_noChange_for_glcmecOutsideIcemask
@Test
- subroutine topo_changes_for_glcmecInsideIcemask(this)
+ subroutine topo_noChange_for_glcmecNonVirtual(this)
class(TestTopo), intent(inout) :: this
type(glc_behavior_type) :: glc_behavior
real(r8), parameter :: topo_orig = 7._r8
- real(r8), parameter :: atm_topo = 23._r8
- ! our column should get set to this:
+ ! our column should NOT get set to either of these:
+ real(r8), parameter :: atm_topo = 23._r8
real(r8), parameter :: glc_topo = 27._r8
call setup_single_ice_column(elev_class = 1)
- glc_behavior = create_glc_behavior_all_virtual()
+ glc_behavior = create_glc_behavior_all_multiple()
topo_glc_mec(:,:) = topo_orig
call this%topo%Init(bounds)
@@ -337,8 +380,8 @@ contains
atm_topo_grc = grc_array(atm_topo), &
glc_topo = glc_topo)
- @assertEqual(glc_topo, this%topo%topo_col(bounds%begc))
- end subroutine topo_changes_for_glcmecInsideIcemask
+ @assertEqual(topo_orig, this%topo%topo_col(bounds%begc))
+ end subroutine topo_noChange_for_glcmecNonVirtual
@Test
subroutine topo_changes_for_singleAtAtmTopo(this)
diff --git a/tools/README b/tools/README
index 568dc1239b..693e793370 100644
--- a/tools/README
+++ b/tools/README
@@ -11,8 +11,6 @@ I. General directory structure:
crop_calendars --- Regrid and process GGCMI sowing and harvest date files for use in CTSM.
mkmapgrids ------- Create regular lat/lon SCRIP grid files
- mkprocdata_map --- Convert output unstructured grids into a 2D format that
- can be plotted easily
site_and_regional Scripts for handling input datasets for site and regional cases.
These scripts both help with creation of datasets using the
@@ -33,49 +31,6 @@ I. General directory structure:
II. Notes on building/running for each of the above tools:
- mkprocdata_map has the following files to facilitate building the FORTRAN code:
-
- README ------- Specific help for using the specific tool and help on specific
- files in that directory.
- src/Filepath ----- List of directories needed to build the tool
- (some files in ../src directories are required).
- src/Makefile ----- GNU Makefile to build the tool
- (these are identical between tools.
- src/Macros.custom Customization of make macros for the particular tool in question
- src/Srcfiles ----- List of source files that are needed.
- src/Mkdepends ---- Dependency generator program
-
- To build:
-
- cd <tool directory>
- setenv INC_NETCDF <path to NetCDF include directory>
- setenv LIB_NETCDF <path to NetCDF library directory>
- gmake
-
- The process will create a file called "Depends" which has the dependencies
- for the build of each file on other files.
-
- By default some codes may be compiled non-optimized
- so that you can use the debugger, and with bounds-checking, and float trapping on.
- To speed up do the following...
-
- gmake OPT=TRUE
-
- Also some of the tools allow for OpenMP shared memory parallelism
- (such as mksurfdata) with
-
- gmake SMP=TRUE
-
- To run a program with a namelist:
-
- ./program < namelist
-
- To run a program built with SMP=TRUE:
-
- setenv OMP_NUM_THREADS=<number of threads>
-
- run normally as above
-
mksurfdata_esmf has a cime configure and CMake based build using the following files:
gen_mksurfdata_build ---- Build mksurfdata_esmf
diff --git a/tools/README.testing b/tools/README.testing
deleted file mode 100644
index 9c386a3b26..0000000000
--- a/tools/README.testing
+++ /dev/null
@@ -1,58 +0,0 @@
-tools/README.testing May/23/2011
-
-There is automated testing for all of the tools and scripts under this tools directory.
-The tests are in the test/tools directory and are any of the scripts
-that have "tools" in the name. There are several assumptions made in order for the
-testing to work.
-
-
-1.) Executable name is the same as the directory name
-
-The name of the executable program is the same as the directory name of the tool.
-
-2.) Build works the same for any Fortran tools
-
-The build for any Fortran tools should work the same way, with the same options
-and required files for it. The files: Makefile, Mkdepends, Filepath and Srcfile
-are expected to exist in the tool "src" sub-directory. To make maintaining these files easier
-in general the Makefile and Mkdepends files should be kept identical other than
-default settings for OPT and SMP and the output executable name.
-
-Options to the Makefile:
-
- LIB_NETCDF --- Library directory location of NetCDF. (defaults to /usr/local/lib)
- INC_NETCDF --- Include directory location of NetCDF. (defaults to /usr/local/include)
- MOD_NETCDF --- Module directory location of NetCDF. (defaults to $LIB_NETCDF)
- USER_FC ------ Allow user to override the default Fortran compiler specified in Makefile.
- USER_FCTYP --- Allow user to override the default type of Fortran compiler
- (Linux and USER_FC=ftn only).
- USER_CC ------ Allow user to override the default C compiler specified in Makefile
- (Linux only).
- USER_LINKER -- Allow user to override the default linker specified in Makefile.
- SMP ---------- Shared memory Multi-processing (TRUE or FALSE) [default is FALSE]
- OPT ---------- Use optimized options. (TRUE or FALSE)
-
-3.) Successful completion of the tool ends with "Successfully ..."
-
-After the tool completes it should have an unique string telling of the
-successful completion of the file that is searchable in the log file starting
-with "Successfully ". If this string isn't found in the log file, it
-will be assumed by the test setup that the test failed.
-
-4.) NetCDF files are created or modified
-
-It is assumed that NetCDF files are created and/or modified by the process. And if
-NetCDF files are NOT created -- something went wrong. For some scripts that copy files
-to other locations this means an "-nomv" option needs to be invoked (and one provided)
-so that it leaves the files created in the current directory.
-
-5.) Namelist examples exist .* namelist files
-
-To specify options for the running of the tool, sample namelist files
-are provided or a sample run options file. These files can EITHER be in the
-tool directory OR the ../../test/tools/nl_files directory.
-
-6.) Specific tests for run scripts
-
-For tools that have scripts to create namelists and run the tool for you, there
-are customized tests to run these tools.
diff --git a/tools/mkprocdata_map/README b/tools/mkprocdata_map/README
deleted file mode 100644
index 92ffb4856c..0000000000
--- a/tools/mkprocdata_map/README
+++ /dev/null
@@ -1,151 +0,0 @@
-$CTSMROOT/tools/mkprocdata_map/clm/README Oct 30, 2012
-
-This directory contains scripts for regridding CLM output from an
-unstructured grid (1-d output using the lndgrid dimension) to a 2-d
-(lat/lon) grid. The regridding method is area-conservative.
-
-The following steps provide a method to create the necessary inputs to
-this script, produce an executable, and regrid output:
-
-In the following instructions, the "original" resolution is the
-resolution of the run on an unstructured grid, and the "target"
-resolution is the regular lat/lon resolution to which you will regrid
-the output.
-
-(0) Install prerequisites:
-
- (a) If you do not already have a mapping file from the original
- resolution to the target resolution, you will need the
- ESMF_RegridWeightGen tool installed on your system.
-
- (b) The wrapper scripts describe below require the netCDF operators
- (NCO). These nco tools (ncks, ncap2, etc.) must be in your path.
-
-(1) Determine the target resolution. This resolution must be a regular
- lat/lon resolution. Generally, this should be a resolution close
- to the resolution of the CLM run. For example, when running CLM at
- ne30_np4 resolution, a good target resolution is 0.9x1.25 (i.e.,
- finite volume 1 degree: f09); when running CLM at ne120_np4
- resolution, a good target resolution is 0.23x0.31 (i.e., finitev
- volume 1/4 degree: f02).
-
-(2) Perform a short CLM run at the target resolution, producing at
- least one history file. After this run completes, set the
- environment variable $TEMPLATE_FILE to point to one of the history
- files created by this run.
-
-(3) Create a conservative mapping file from the original resolution to
- the target resolution using the ESMF regrid weight generator. The
- basic method for doing this is:
-
- $ESMF_PATH/bin/ESMF_RegridWeightGen -s $INGRID -d $OUTGRID -m conserve -w $MAP_FILE -i
-
- where $INGRID gives the path to a SCRIP grid file at the original
- resolution, $OUTGRID gives the path to a SCRIP grid file at the
- template resolution, and $MAP_FILE gives the name of the mapping
- file that will be generated.
-
- However, you may want to wrap this in a job script to run it on
- multiple processors (using mpirun), and you may have to set other
- machine-specific environment variables.
-
-(4) Build the mkprocdata_map tool. From the current directory, do the
- following:
-
- > cd src
- > gmake
- > cd ..
-
- By default code compiles optimized so it's reasonably fast. If you want
- to use the debugger, with bounds-checking, and float trapping on do the
- following:
- gmake OPT=FALSE
- See Also: See the components/clm/tools/README file for notes about setting
- the path for NetCDF.
-
- This builds the mkprocdata_map executable. However, you generally
- will not want to run this executable directly: instead, you should
- use one of the wrapper scripts described below.
-
-(5) Do the regridding using one of the wrapper scripts in this
- directory. To determine which script is most appropriate: Do you
- need to regrid just one or a few output files, or most/all of the
- output files in a directory?
-
- (a) If you are regridding just one or a few output files, you can
- use mkprocdata_map_wrap. Its usage is:
-
- > mkprocdata_map_wrap -i input_file -o output_file -m $MAP_FILE -t $TEMPLATE_FILE
-
- where:
- - input_file is the CLM history file you want to regrid
- - output_file is the name of the regridded file that will be
- created
- - $MAP_FILE is the ESMF conservative mapping file created in
- step (3)
- - $TEMPLATE_FILE is a CLM history file at the target resolution,
- created in step (2)
-
- You may also specify the '-l' option to this script. This option
- determines whether to determine landfrac and related variables
- by regridding the input file (when you don't give the '-l'
- option), or by copying these variables from the template file
- (when you give the '-l' option). These variables are important
- for computing regional and global averages, e.g., as is done in
- the land diagnostics package. Each method may be reasonable,
- depending on the purposes of the regridding. For example, if you
- want regional/global integrals to be as true as possible to the
- original run, you should run withOUT the '-l' option; but if you
- want to compare regional/global integrals between the original
- run and a run at the target resolution, then you may want to run
- WITH the '-l' option.
-
- Run 'mkprocdata_map_wrap -h' for full usage
-
- (b) If you need to regrid most or all of the output files in a
- directory, you can use the convenience script
- mkprocdata_map_all. This script runs mkprocdata_map_wrap on all
- files matching a given pattern within a directory. Its basic
- usage is the following, done from a directory containing many
- CLM history files:
-
- > /path/to/mkprocdata_map_all -p $CASE -m $MAP_FILE -t $TEMPLATE_FILE
-
- where:
- - $CASE is the case name of the original run (this -p argument
- is actually more general: it provides the prefix of files on
- which mkprocdata_map_wrap should be run; run
- 'mkprocdata_map_all -h' for details)
- - $MAP_FILE is the ESMF conservative mapping file created in
- step (3)
- - $TEMPLATE_FILE is a CLM history file at the target resolution,
- created in step (2)
-
- There are a number of additional optional arguments to this
- script, including the '-l' option described in (a), above. Run
- 'mkprocdata_map_all -h' for full usage.
-
-
-------------------------------------------------------------------------
-Some miscellaneous notes on the scripts contained here
-------------------------------------------------------------------------
-
-- area vs. area_regridded in the output of mkprocdata_map_wrap and
- mkprocdata_map_all: The 'area' variable gives the actual grid cell
- area on the destination grid. The 'area_regridded' variable is the
- result of performing the regridding procedure on the 'area' variable
- in the original source data. This seems to be the wrong way to
- regrid areas (e.g., it leads to global totals that do not make
- sense). However, area_regridded is left in the regridded files as a
- diagnostic. BUT PLEASE USE CAUTION IF USING THIS AREA_REGRIDDED
- VALUE, UNLESS YOU KNOW WHAT IT REALLY REPRESENTS!
-
-- At least as of this writing (Oct 29, 2012), there is insufficient
- metadata on the CLM history files to regrid all variables
- perfectly. In particular, note that many CLM history variables apply
- only over a subset of the grid cell (e.g., over the non-lake portion
- of the grid cell). Thus, to regrid these variables appropriately, we
- would need to weight each grid cell's value by the portion of the
- grid cell over which the field applies. However, doing this would
- require metadata about each field that is not currently
- available.
diff --git a/tools/mkprocdata_map/README.filedescriptions b/tools/mkprocdata_map/README.filedescriptions
deleted file mode 100644
index e657e7c7d9..0000000000
--- a/tools/mkprocdata_map/README.filedescriptions
+++ /dev/null
@@ -1,25 +0,0 @@
-$CTSMROOT/tools/mkprocdata_map/README.filedescriptions Erik Kluzek
- 06/08/2018
-
-mkprocdata_map_all ------------ Script to run over a list of files
-mkprocdata_map_wrap ----------- Main script to actually use
-mkprocdata_map_functions.bash - Bash shell functions to use in other scripts
-README ------------------------ Description and how to run
-src --------------------------- Directory with FORTRAN source code
-
-Also there are some sample files that can be used for testing under inputdata in
-
-$DIN_LOC_ROOT/lnd/clm2/test_mkprocdata_map
-
-See how this is done by looking at the file for testing mkprocdata_map:
-
-../../test/tools/nl_files/mkprocdata_ne30_to_f19_I2000
-
-Which does something like the following:
-
-./mkprocdata_map_wrap \
--i $DIN_LOC_ROOT/lnd/clm2/test_mkprocdata_map/clm4054_ne30g16_I2000.clm2.h0.2000-01_c170430.nc \
--o ne30output_onf19grid.nc \
--m $DIN_LOC_ROOT/lnd/clm2/test_mkprocdata_map/map_ne30np4_nomask_to_fv1.9x2.5_nomask_aave_da_c121107.nc \
--t $DIN_LOC_ROOT/lnd/clm2/test_mkprocdata_map/clm4054_f19g16_I2000.clm2.h0.2000-01_c170430.nc
-
diff --git a/tools/mkprocdata_map/mkprocdata_map_all b/tools/mkprocdata_map/mkprocdata_map_all
deleted file mode 100755
index 73e8abedf1..0000000000
--- a/tools/mkprocdata_map/mkprocdata_map_all
+++ /dev/null
@@ -1,202 +0,0 @@
-#!/bin/bash
-
-# This script runs mkprocdata_map_wrap on all files matching a given
-# pattern within a directory.
-
-# Created by Bill Sacks, 5-26-11
-
-# ----------------------------------------------------------------------
-# LOCAL FUNCTIONS DEFINED HERE
-# ----------------------------------------------------------------------
-
-function Usage {
- script_name=`basename $0`
- echo "Usage: $script_name -p prefix -m map_file -t template_file [-d] [-e executable-path] [-h] [-i] [-l] [-o output_suffix] [-r diRectory] [-s suffix]"
- echo ""
- echo "This script runs mkprocdata_map_wrap on all files matching a"
- echo "given pattern within a directory."
- echo ""
- echo "'prefix' gives the prefix of the files on which"
- echo "mkprocdata_map_wrap should be run; 'prefix' should NOT contain"
- echo "wildcard characters. The prefix is also used to translate"
- echo "from input to output file names (see examples below)"
- echo ""
- echo "'map_file' gives the name (and full path if not in the current"
- echo "directory) of the mapping file"
- echo ""
- echo "'template_file' gives the name (and full path if not in the"
- echo "current directory) of the template file, from which we read"
- echo "lats, lons and some other variables"
- echo ""
- echo "The following are optional arguments:"
- echo ""
- echo "[-d]: Do a test (Dry run): do all error-checking on"
- echo " arguments and print commands that would be run, but"
- echo " don't actually run commands"
- echo ""
- echo "[-e executable-path]: Gives the path of the mkprocdata_map executable."
- echo " If not specified, the path is determined by the"
- echo " default value in mkprocdata_map_wrap."
- echo ""
- echo "[-h]: Print this help message and exit"
- echo ""
- echo "[-i]: Ignore (skip) existing output files; if this option is"
- echo " not specified, then the script dies with an error if"
- echo " any of the desired output files already exist"
- echo ""
- echo "[-l]: Option passed to mkprocdata_map_wrap: rather than computing"
- echo " landfrac and related variables by regridding the input file,"
- echo " instead copy these variables directly from the template file."
- echo ""
- echo "[-o output_suffix]: suffix to append to the end of the prefix"
- echo " on the output files"
- echo " If not specified, '_2d' is used"
- echo ""
- echo "[-r diRectory]: Do the processing in the given directory."
- echo " If not specified, processing is done in the"
- echo " current working directory."
- echo ""
- echo "[-s suffix]: Run mkprocdata_map_wrap on all files matching the"
- echo " pattern '\${prefix}\${suffix}'. The suffix can -"
- echo " and often will - contain wildcards; but"
- echo " remember to enclose 'suffix' in quotes to"
- echo " prevent shell expansion."
- echo " If not specified, run mkprocdata_map_wrap on all"
- echo " files matching '\${prefix}*'"
- echo ""
- echo ""
- echo "Example: $script_name -p Ib14_ne30np4_gx1v6 -m map_ne30np4_to_fv1.9x2.5_aave_da_091230.nc -t Ib19_1.9x2.5_gx1v6.clm2.h0.0001-01.nc"
- echo "This will run mkprocdata_map_wrap on all files whose names begin"
- echo "with 'Ib14_ne30np4_gx1v6' in the current directory, using the"
- echo "mapping file named 'map_ne30np4_to_fv1.9x2.5_aave_da_091230.nc'"
- echo "and the template file named 'Ib19_1.9x2.5_gx1v6.clm2.h0.0001-01.nc'"
- echo "For an input file named:"
- echo " Ib14_ne30np4_gx1v6.clm2.h0.0001-01-06-00000.nc"
- echo "The output file will be named:"
- echo " Ib14_ne30np4_gx1v6_2d.clm2.h0.0001-01-06-00000.nc"
- echo ""
- echo "Example: $script_name -o '_remap' -s '*.h0.0001*.nc' -p Ib14_ne30np4_gx1v6 -m map_ne30np4_to_fv1.9x2.5_aave_da_091230.nc -t Ib19_1.9x2.5_gx1v6.clm2.h0.0001-01.nc"
- echo "This will run mkprocdata_map_wrap on all files whose names match"
- echo "the pattern 'Ib14_ne30np4_gx1v6*.h0.0001*.nc', in the"
- echo "current directory, using the mapping file named"
- echo "'map_ne30np4_to_fv1.9x2.5_aave_da_091230.nc' and the"
- echo "template file named Ib19_1.9x2.5_gx1v6.clm2.h0.0001-01.nc"
- echo "For an input file named:"
- echo " Ib14_ne30np4_gx1v6.clm2.h0.0001-01-06-00000.nc"
- echo "The output file will be named:"
- echo " Ib14_ne30np4_gx1v6_remap.clm2.h0.0001-01-06-00000.nc"
- echo ""
-}
-
-# ----------------------------------------------------------------------
-# BEGIN MAIN SCRIPT
-# ----------------------------------------------------------------------
-
-script_dir=`dirname $0`
-source $script_dir/mkprocdata_map_functions.bash
-
-# ----------------------------------------------------------------------
-# Handle command-line arguments
-# ----------------------------------------------------------------------
-
-# define default values:
-# required arguments:
-prefix=""
-map_file=""
-template_file=""
-# optional arguments:
-directory="."
-ignore_existing=0
-output_suffix="_2d"
-suffix="*"
-dryrun=0
-extra_args=""
-
-while getopts de:hilm:o:p:r:s:t: opt; do
- case $opt in
- d) dryrun=1;;
- e) extra_args="$extra_args -e $OPTARG";;
- h) Usage; exit;;
- i) ignore_existing=1;;
- l) extra_args="$extra_args -l";;
- m) map_file=$OPTARG;;
- o) output_suffix=$OPTARG;;
- p) prefix=$OPTARG;;
- r) directory=$OPTARG;;
- s) suffix=$OPTARG;;
- t) template_file=$OPTARG;;
- \?) Usage; exit 1
- esac
-done
-
-# ----------------------------------------------------------------------
-# Error checking on arguments
-# ----------------------------------------------------------------------
-
-if [ -z "$prefix" ]; then
- echo "Must specify a prefix"
- Usage
- exit 1
-fi
-
-check_file_arg "$map_file" "map"
-check_file_arg "$template_file" "template"
-
-# Make sure directory is really a directory
-if [ ! -d $directory ]; then
- echo "ERROR: $directory is not a directory"
- echo ""
- Usage
- exit 1
-fi
-
-
-# ----------------------------------------------------------------------
-# Change to desired directory
-# ----------------------------------------------------------------------
-
-olddir=`pwd`
-cd $directory
-
-# ----------------------------------------------------------------------
-# Get list of files matching the given pattern; make sure there really
-# are some matching files
-# ----------------------------------------------------------------------
-
-files=`ls ${prefix}${suffix}`
-if [ $? -ne 0 ]; then
- echo "ERROR trying to find files matching: ${prefix}${suffix}"
- echo ""
- Usage
- exit 1
-fi
-
-# ----------------------------------------------------------------------
-# Loop through files matching the given pattern; run mkprocdata_map_wrap for each
-# ----------------------------------------------------------------------
-
-for infile in $files; do
- outfile=${infile/$prefix/${prefix}${output_suffix}}
- if [ -e $outfile ]; then
- if [ $ignore_existing -eq 0 ]; then
- echo ""
- echo "ERROR: output file $outfile already exists"
- exit 1
- else
- echo ""
- echo "WARNING: output file $outfile already exists: skipping"
- echo ""
- fi
-
- else # outfile does not exist
- echo ""
- do_cmd "${script_dir}/mkprocdata_map_wrap -i $infile -o $outfile -m $map_file -t $template_file $extra_args" $dryrun
- fi
-done
-
-# ----------------------------------------------------------------------
-# Clean up
-# ----------------------------------------------------------------------
-
-cd $olddir
-
diff --git a/tools/mkprocdata_map/mkprocdata_map_functions.bash b/tools/mkprocdata_map/mkprocdata_map_functions.bash
deleted file mode 100644
index bbc359fb89..0000000000
--- a/tools/mkprocdata_map/mkprocdata_map_functions.bash
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/bash
-
-# This file contains functions used by other bash scripts in this directory.
-
-# This function echoes the command given by $1 (cmd), then executes it.
-# However, if $2 (dryrun) is non-zero, then it only does the echo, not the execution.
-# Usage: do_cmd cmd dryrun
-# Returns 0 on success, non-zero on failure; if there is an error, the error string is echoed.
-function do_cmd {
- if [[ $# -ne 2 ]]; then
- echo "ERROR in do_cmd: wrong number of arguments: expected 2, received $#"
- exit 1
- fi
-
- local cmd=$1
- local dryrun=$2
-
- echo $cmd
- if [ $dryrun -eq 0 ]; then
- # We use 'eval $cmd' rather than just '$cmd', because the
- # latter doesn't work right if the command contains any quoted
- # strings (e.g., svn ci -m "this is my message")
- eval $cmd
- if [ $? -ne 0 ]; then
- echo "ERROR in do_cmd: error executing command"
- exit 2
- fi
- fi
-
- return 0
-}
-
-# make sure that the given file name argument was provided, and that
-# the file exists; exit the script with a usage message if either of
-# these is not true
-#
-# Usage: check_file_arg filename_arg description
-# (description is echoed if there is an error)
-# Example: check_file_arg "$input_file" "input"
-# (note that $input_file must be in quotes)
-function check_file_arg {
- local filename=$1
- local description=$2
-
- if [ -z "$filename" ]; then
- echo "ERROR: Must specify $description file"
- Usage
- exit 1
- fi
-
- if [ ! -f $filename ]; then
- echo "ERROR: Can't find $description file: $filename"
- Usage
- exit 1
- fi
-}
-
diff --git a/tools/mkprocdata_map/mkprocdata_map_wrap b/tools/mkprocdata_map/mkprocdata_map_wrap
deleted file mode 100755
index 4744b0eacc..0000000000
--- a/tools/mkprocdata_map/mkprocdata_map_wrap
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/bin/bash
-
-# This script is a wrapper around mkprocdata_map that runs that
-# program and then copies some additional variables from the template
-# file to the output file. It also does some additional pre and
-# post-processing in order to create some additional variables.
-
-# Created by Bill Sacks, 5-25-11
-
-# ----------------------------------------------------------------------
-# SET PARAMETERS HERE
-# ----------------------------------------------------------------------
-
-# comma-delimited list of extra variables to copy directly from
-# template file; note that these variables should not be written out
-# by mkprocdata_map (i.e., everything in this list should be listed in
-# the 'ignore_var' function in mkprocdata_map.F90); however, there may
-# be some variables in the 'ignore_var' function that are not listed
-# here - e.g., variables that we treat specially.
-copy_vars="lon,lat"
-
-# comma-delimited list of extra variables to copy from the template
-# file if the -l option is specified -- this option says to copy
-# landfrac and related variables. Note that some of these variables
-# may be written out by mkprocdata_map, in which case they will be
-# overwritten afterwards (slighly less efficient, but that keeps
-# things simpler).
-landfrac_copy_vars="landfrac,landmask,pftmask"
-
-# name of the executable;
-# expected to be in the same directory as this script unless -e option is given
-executable="mkprocdata_map"
-
-# minimum value for regridded pftmask variable for the output variable to be 1
-pftmask_min="1.e-6"
-
-# fill value for landmask
-landmask_fill=-9999
-
-# ----------------------------------------------------------------------
-# LOCAL FUNCTIONS DEFINED HERE
-# ----------------------------------------------------------------------
-
-function Usage {
- script_name=`basename $0`
- echo "Usage: $script_name -i input_file -o output_file -m map_file -t template_file [-e executable-path] [-h] [-l]"
- echo ""
- echo "This script runs mkprocdata_map with the given arguments (-i, -o, -m and -t),"
- echo "then copies some additional variables from the template file"
- echo "to the output file. It also does some additional pre and"
- echo "post-processing in order to create some additional variables."
- echo ""
- echo "Additional optional arguments:"
- echo ""
- echo "[-e executable-path]: Gives the path of the mkprocdata_map executable."
- echo " If not specified, the executable is assumed to be"
- echo " in the same directory as this script."
- echo ""
- echo "[-h]: Print this help message and exit"
- echo ""
- echo "[-l]: Rather than computing landfrac and related variables"
- echo "by regridding the input file, instead copy these variables"
- echo "directly from the template file. The variables this pertains"
- echo "to are:"
- echo $landfrac_copy_vars
-}
-
-# This function operates on a single variable in a file, changing all
-# places where that variable is missing to some new (non-missing)
-# value. The _FillValue attribute remains unchanged.
-# Usage: change_missing_to_value varname newval infile outfile
-# - varname: the name of the variable to change
-# - newval: all instances of the missing value will be replaced with
-# this new value
-# - infile: input file name
-# - outfile: output file name (can be the same as infile)
-function change_missing_to_value {
- if [[ $# -ne 4 ]]; then
- echo "ERROR in change_missing_to_value: wrong number of arguments: expected 2, received $#"
- exit 1
- fi
-
- varname=$1
- newval=$2
- infile=$3
- outfile=$4
-
- varname_tmp=${varname}_tmp_$$
-
- cat > cmds.nco.tmp.$$ <<EOF
-[...]
- do_cmd "ncap2 -O -s 'pftmask=(pftmask_float >= $pftmask_min)' $output_file $output_file" 0
- do_cmd "ncks -O -x -v pftmask_float $output_file $output_file" 0
-
- # --- Calculate landmask from landfrac ---
- echo ""
-
- cat > cmds.nco.tmp.$$ <<EOF
-landmask_float=(landfrac > 0);
-landmask_float.change_miss($landmask_fill);
-landmask=int(landmask_float);
-EOF
-
- do_cmd "ncap2 -O -S cmds.nco.tmp.$$ $output_file $output_file" 0
- rm cmds.nco.tmp.$$
-
- change_missing_to_value landmask 0 $output_file $output_file
-
- # in the following, note that we need to manually set missing_value, because it doesn't get changed through the .set_miss call in nco:
- do_cmd "ncatted -a long_name,landmask,o,c,'land/ocean mask (0.=ocean and 1.=land)' -a missing_value,landmask,o,i,$landmask_fill $output_file" 0
-fi
-
-echo "Successfully regridded data"
diff --git a/tools/mkprocdata_map/src/Filepath b/tools/mkprocdata_map/src/Filepath
deleted file mode 100644
index 9c558e357c..0000000000
--- a/tools/mkprocdata_map/src/Filepath
+++ /dev/null
@@ -1 +0,0 @@
-.
diff --git a/tools/mkprocdata_map/src/Makefile b/tools/mkprocdata_map/src/Makefile
deleted file mode 100644
index 42f797b3c2..0000000000
--- a/tools/mkprocdata_map/src/Makefile
+++ /dev/null
@@ -1,10 +0,0 @@
-# Makefile for mkprocdata_map
-
-EXENAME = ../mkprocdata_map
-
-# Set optimization on by default
-ifeq ($(OPT),$(null))
- OPT := TRUE
-endif
-
-include Makefile.common
diff --git a/tools/mkprocdata_map/src/Makefile.common b/tools/mkprocdata_map/src/Makefile.common
deleted file mode 100644
index ab79f94144..0000000000
--- a/tools/mkprocdata_map/src/Makefile.common
+++ /dev/null
@@ -1,360 +0,0 @@
-#-----------------------------------------------------------------------
-# This Makefile is for building clm tools on AIX, Linux (with pgf90 or
-# lf95 compiler), Darwin or IRIX platforms.
-#
-# These macros can be changed by setting environment variables:
-#
-# LIB_NETCDF --- Library directory location of netcdf. (defaults to /usr/local/lib)
-# INC_NETCDF --- Include directory location of netcdf. (defaults to /usr/local/include)
-# MOD_NETCDF --- Module directory location of netcdf. (defaults to $LIB_NETCDF)
-# USER_FC ------ Allow user to override the default Fortran compiler specified in Makefile.
-# USER_FCTYP --- Allow user to override the default type of Fortran compiler (linux and USER_FC=ftn only).
-# USER_CC ------ Allow user to override the default C compiler specified in Makefile (linux only).
-# USER_LINKER -- Allow user to override the default linker specified in Makefile.
-# USER_CPPDEFS - Additional CPP defines.
-# USER_CFLAGS -- Additional C compiler flags that the user wishes to set.
-# USER_FFLAGS -- Additional Fortran compiler flags that the user wishes to set.
-# USER_LDLAGS -- Additional load flags that the user wishes to set.
-# SMP ---------- Shared memory Multi-processing (TRUE or FALSE) [default is FALSE]
-# OPT ---------- Use optimized options.
-#
-#------------------------------------------------------------------------
-
-# Set up special characters
-null :=
-
-# Newer makes set the CURDIR variable.
-CURDIR := $(shell pwd)
-
-RM = rm
-
-# Check for the netcdf library and include directories
-ifeq ($(LIB_NETCDF),$(null))
- LIB_NETCDF := /usr/local/lib
-endif
-
-ifeq ($(INC_NETCDF),$(null))
- INC_NETCDF := /usr/local/include
-endif
-
-ifeq ($(MOD_NETCDF),$(null))
- MOD_NETCDF := $(LIB_NETCDF)
-endif
-
-# Set user specified Fortran compiler
-ifneq ($(USER_FC),$(null))
- FC := $(USER_FC)
-endif
-
-# Set user specified C compiler
-ifneq ($(USER_CC),$(null))
- CC := $(USER_CC)
-endif
-
-# Set if Shared memory multi-processing will be used
-ifeq ($(SMP),$(null))
- SMP := FALSE
-endif
-
-CPPDEF := $(USER_CPPDEFS)
-
-# Set optimization on by default
-ifeq ($(OPT),$(null))
- OPT := TRUE
-endif
-
-ifeq ($(OPT),TRUE)
- CPPDEF := -DOPT
-endif
-
-# Determine platform
-UNAMES := $(shell uname -s)
-
-# Load dependency search path.
-dirs := . $(shell cat Filepath)
-
-# Set cpp search path, include netcdf
-cpp_dirs := $(dirs) $(INC_NETCDF) $(MOD_NETCDF)
-cpp_path := $(foreach dir,$(cpp_dirs),-I$(dir)) # format for command line
-
-# Expand any tildes in directory names. Change spaces to colons.
-# (the vpath itself is set elsewhere, based on this variable)
-vpath_dirs := $(foreach dir,$(cpp_dirs),$(wildcard $(dir)))
-vpath_dirs := $(subst $(space),:,$(vpath_dirs))
-
-#Primary Target: build the tool
-all: $(EXENAME)
-
-# Get list of files and build dependency file for all .o files
-# using perl scripts mkSrcfiles and mkDepends
-
-SOURCES := $(shell cat Srcfiles)
-
-OBJS := $(addsuffix .o, $(basename $(SOURCES)))
-
-# Set path to Mkdepends script; assumes that any Makefile including
-# this file is in a sibling of the src directory, in which Mkdepends
-# resides
-Mkdepends := ../src/Mkdepends
-
-$(CURDIR)/Depends: $(CURDIR)/Srcfiles $(CURDIR)/Filepath
- $(Mkdepends) Filepath Srcfiles > $@
-
-
-# Architecture-specific flags and rules
-#------------------------------------------------------------------------
-# AIX
-#------------------------------------------------------------------------
-
-ifeq ($(UNAMES),AIX)
-CPPDEF += -DAIX
-cpre = $(null)-WF,-D$(null)
-FPPFLAGS := $(patsubst -D%,$(cpre)%,$(CPPDEF))
-FFLAGS = -c -I$(INC_NETCDF) -q64 -qsuffix=f=f90 -qsuffix=f=f90:cpp=F90 \
- $(FPPFLAGS) -g -qfullpath -qarch=auto -qtune=auto -qsigtrap=xl__trcedump -qsclk=micro
-
-LDFLAGS = -L$(LIB_NETCDF) -q64 -lnetcdff -lnetcdf
-ifneq ($(OPT),TRUE)
- FFLAGS += -qinitauto=7FF7FFFF -qflttrap=ov:zero:inv:en -qspillsize=4000 -C
-else
- FFLAGS += -O2 -qmaxmem=-1 -Q
- LDFLAGS += -Q
-endif
-CFLAGS := -q64 -g $(CPPDEF) -O2
-FFLAGS += $(cpp_path)
-CFLAGS += $(cpp_path)
-
-ifeq ($(SMP),TRUE)
- FC = xlf90_r
- FFLAGS += -qsmp=omp
- LDFLAGS += -qsmp=omp
-else
- FC = xlf90
-endif
-
-endif
-
-#------------------------------------------------------------------------
-# Darwin
-#------------------------------------------------------------------------
-
-ifeq ($(UNAMES),Darwin)
-
-# Set the default Fortran compiler
-ifeq ($(USER_FC),$(null))
- FC := g95
-endif
-ifeq ($(USER_CC),$(null))
- CC := gcc
-endif
-
-CFLAGS := -g -O2
-CPPDEF += -DSYSDARWIN -DDarwin -DLINUX
-LDFLAGS :=
-
-ifeq ($(FC),g95)
-
- CPPDEF += -DG95
- FFLAGS := -c -fno-second-underscore $(CPPDEF) $(cpp_path) -I$(MOD_NETCDF)
- ifeq ($(OPT),TRUE)
- FFLAGS += -O2
- else
- FFLAGS += -g -fbounds-check
- endif
-
-endif
-
-ifeq ($(FC),gfortran)
-
- CPPDEF += -DG95
- FFLAGS := -c -fno-second-underscore $(CPPDEF) $(cpp_path) -I$(MOD_NETCDF) \
- -fno-range-check
- ifeq ($(OPT),TRUE)
- FFLAGS += -O2
- else
- FFLAGS += -g -fbounds-check
- endif
-
-endif
-
-ifeq ($(FC),ifort)
-
- CPPDEF += -DFORTRANUNDERSCORE
- FFLAGS += -c -ftz -g -fp-model precise $(CPPDEF) $(cpp_path) \
- -convert big_endian -assume byterecl -traceback -FR
- LDFLAGS += -m64
-
- ifneq ($(OPT),TRUE)
- FFLAGS += -CB -O0
- else
- FFLAGS += -O2
- endif
- ifeq ($(SMP),TRUE)
- FFLAGS += -qopenmp
- LDFLAGS += -qopenmp
- endif
-endif
-
-ifeq ($(FC),pgf90)
-
- CPPDEF += -DFORTRANUNDERSCORE
- FFLAGS += -c $(CPPDEF) $(cpp_path)
- ifneq ($(OPT),TRUE)
- FFLAGS += -g -Ktrap=fp -Mbounds -Kieee
- else
- FFLAGS += -fast -Kieee
- endif
-
- ifeq ($(SMP),TRUE)
- FFLAGS += -mp
- LDFLAGS += -mp
- endif
-
-endif
-
-ifeq ($(CC),icc)
- CFLAGS += -m64 -g
- ifeq ($(SMP),TRUE)
- CFLAGS += -qopenmp
- endif
-endif
-ifeq ($(CC),pgcc)
- CFLAGS += -g -fast
-endif
-
-CFLAGS += $(CPPDEF) $(cpp_path)
-LDFLAGS += -L$(LIB_NETCDF) -lnetcdf -lnetcdff
-
-endif
-
-#------------------------------------------------------------------------
-# Linux
-#------------------------------------------------------------------------
-
-ifeq ($(UNAMES),Linux)
- ifeq ($(USER_FC),$(null))
- FC := ifort
- FCTYP := ifort
- else
- ifeq ($(USER_FC),ftn)
- ifneq ($(USER_FCTYP),$(null))
- FCTYP := $(USER_FCTYP)
- else
- FCTYP := pgf90
- endif
- else
- FCTYP := $(USER_FC)
- endif
- endif
- CPPDEF += -DLINUX -DFORTRANUNDERSCORE
- CFLAGS := $(CPPDEF)
- LDFLAGS := $(shell $(LIB_NETCDF)/../bin/nf-config --flibs)
- FFLAGS =
-
- ifeq ($(FCTYP),pgf90)
- CC := pgcc
- ifneq ($(OPT),TRUE)
- FFLAGS += -g -Ktrap=fp -Mbounds -Kieee
- else
- FFLAGS += -fast -Kieee
- CFLAGS += -fast
- endif
-
- ifeq ($(SMP),TRUE)
- FFLAGS += -mp
- LDFLAGS += -mp
- endif
-
- endif
-
- ifeq ($(FCTYP),lf95)
- ifneq ($(OPT),TRUE)
- FFLAGS += -g --chk a,e,s,u -O0
- else
- FFLAGS += -O
- endif
- # Threading only works by putting thread memory on the heap rather than the stack
- # (--threadheap).
- # As of lf95 version 6.2 the thread stacksize limits are (still) too small to run
- # even small resolution problems (FV at 10x15 res fails).
- ifeq ($(SMP),TRUE)
- FFLAGS += --openmp --threadheap 4096
- LDFLAGS += --openmp --threadheap 4096
- endif
- endif
- ifeq ($(FCTYP),pathf90)
- FFLAGS += -extend_source -ftpp -fno-second-underscore
- ifneq ($(OPT),TRUE)
- FFLAGS += -g -O0
- else
- FFLAGS += -O
- endif
- ifeq ($(SMP),TRUE)
- FFLAGS += -mp
- LDFLAGS += -mp
- endif
- endif
- ifeq ($(FCTYP),ifort)
-
- FFLAGS += -ftz -g -fp-model precise -convert big_endian -assume byterecl -traceback -FR
- CFLAGS += -m64 -g
- LDFLAGS += -m64
-
- ifneq ($(OPT),TRUE)
- FFLAGS += -CB -O0
- else
- FFLAGS += -O2
- endif
- ifeq ($(SMP),TRUE)
- FFLAGS += -qopenmp
- CFLAGS += -qopenmp
- LDFLAGS += -qopenmp
- endif
- endif
- FFLAGS += -c -I$(INC_NETCDF) $(CPPDEF) $(cpp_path)
- CFLAGS += $(cpp_path)
-endif
-
-#------------------------------------------------------------------------
-# Default rules and macros
-#------------------------------------------------------------------------
-
-.SUFFIXES:
-.SUFFIXES: .F90 .c .o
-
-# Set the vpath for all file types EXCEPT .o
-# We do this for individual file types rather than generally using
-# VPATH, because for .o files, we don't want to use files from a
-# different build (e.g., in building the unit tester, we don't want to
-# use .o files from the main build)
-vpath %.F90 $(vpath_dirs)
-vpath %.c $(vpath_dirs)
-vpath %.h $(vpath_dirs)
-
-# Append user defined compiler and load flags to Makefile defaults
-CFLAGS += $(USER_CFLAGS)
-FFLAGS += $(USER_FFLAGS)
-LDFLAGS += $(USER_LDFLAGS)
-
-# Set user specified linker
-ifneq ($(USER_LINKER),$(null))
- LINKER := $(USER_LINKER)
-else
- LINKER := $(FC)
-endif
-
-.F90.o:
- $(FC) $(FFLAGS) $<
-
-.c.o:
- $(CC) -c $(CFLAGS) $<
-
-
-$(EXENAME): $(OBJS)
- $(LINKER) -o $@ $(OBJS) $(LDFLAGS)
-
-clean:
- $(RM) -f $(OBJS) *.mod Depends
-
-include $(CURDIR)/Depends
diff --git a/tools/mkprocdata_map/src/Mkdepends b/tools/mkprocdata_map/src/Mkdepends
deleted file mode 100755
index a75e8fdde0..0000000000
--- a/tools/mkprocdata_map/src/Mkdepends
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env perl
-
-# Generate dependencies in a form suitable for inclusion into a Makefile.
-# The source filenames are provided in a file, one per line. Directories
-# to be searched for the source files and for their dependencies are provided
-# in another file, one per line. Output is written to STDOUT.
-#
-# For CPP type dependencies (lines beginning with #include) the dependency
-# search is recursive. Only dependencies that are found in the specified
-# directories are included. So, for example, the standard include file
-# stdio.h would not be included as a dependency unless /usr/include were
-# one of the specified directories to be searched.
-#
-# For Fortran module USE dependencies (lines beginning with a case
-# insensitive "USE", possibly preceded by whitespace) the Fortran compiler
-# must be able to access the .mod file associated with the .o file that
-# contains the module. In order to correctly generate these dependencies
-# two restrictions must be observed.
-# 1) All modules must be contained in files that have the same base name as
-# the module, in a case insensitive sense. This restriction implies that
-# there can only be one module per file.
-# 2) All modules that are to be contained in the dependency list must be
-# contained in one of the source files in the list provided on the command
-# line.
-# The reason for the second restriction is that since the makefile doesn't
-# contain rules to build .mod files the dependency takes the form of the .o
-# file that contains the module. If a module is being used for which the
-# source code is not available (e.g., a module from a library), then adding
-# a .o dependency for that module is a mistake because make will attempt to
-# build that .o file, and will fail if the source code is not available.
-#
-# Author: B. Eaton
-# Climate Modelling Section, NCAR
-# Feb 2001
-
-use Getopt::Std;
-use File::Basename;
-
-# Check for usage request.
-@ARGV >= 2 or usage();
-
-# Process command line.
-my %opt = ();
-getopts( "t:w", \%opt ) or usage();
-my $filepath_arg = shift() or usage();
-my $srcfile_arg = shift() or usage();
-@ARGV == 0 or usage(); # Check that all args were processed.
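-
-# A typical invocation from a Makefile rule (illustrative):
-#   ./Mkdepends Filepath Srcfiles > Depends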
-
-my $obj_dir;
-if ( defined $opt{'t'} ) { $obj_dir = $opt{'t'}; }
-
-open(FILEPATH, $filepath_arg) or die "Can't open $filepath_arg: $!\n";
-open(SRCFILES, $srcfile_arg) or die "Can't open $srcfile_arg: $!\n";
-
-# Make list of paths to use when looking for files.
-# Prepend "." so search starts in current directory. This default is for
-# consistency with the way GNU Make searches for dependencies.
-my @file_paths = <FILEPATH>;
-close(FILEPATH);
-chomp @file_paths;
-unshift(@file_paths,'.');
-foreach $dir (@file_paths) { # (could check that directories exist here)
- $dir =~ s!/?\s*$!!; # remove / and any whitespace at end of directory name
- ($dir) = glob $dir; # Expand tildes in path names.
-}
-
-# Make list of files containing source code.
-my @src = <SRCFILES>;
-close(SRCFILES);
-chomp @src;
-
-# For each file that may contain a Fortran module (*.[fF]90 *.[fF]) convert the
-# file's basename to uppercase and use it as a hash key whose value is the file's
-# basename. This allows fast identification of the files that contain modules.
-# The only restriction is that the file's basename and the module name must match
-# in a case insensitive way.
-my %module_files = ();
-my ($f, $name, $path, $suffix, $mod);
-my @suffixes = ('\.[fF]90', '\.[fF]' );
-foreach $f (@src) {
- ($name, $path, $suffix) = fileparse($f, @suffixes);
- ($mod = $name) =~ tr/a-z/A-Z/;
- $module_files{$mod} = $name;
-}
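-# For example (illustrative), fileutils.F90 yields the entry
-# $module_files{'FILEUTILS'} = 'fileutils'.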
-
-# Now make a list of .mod files in the file_paths. If a .o source dependency
-# can't be found based on the module_files list above, then maybe a .mod
-# module dependency can if the mod file is visible.
-my %trumod_files = ();
-my ($dir);
-my ($f, $name, $path, $suffix, $mod);
-my @suffixes = ('\.mod' );
-foreach $dir (@file_paths) {
- @filenames = (glob("$dir/*.mod"));
- foreach $f (@filenames) {
- ($name, $path, $suffix) = fileparse($f, @suffixes);
- ($mod = $name) =~ tr/a-z/A-Z/;
- $trumod_files{$mod} = $name;
- }
-}
-
-#print STDERR "\%module_files\n";
-#while ( ($k,$v) = each %module_files ) {
-# print STDERR "$k => $v\n";
-#}
-
-# Find module and include dependencies of the source files.
-my ($file_path, $rmods, $rincs);
-my %file_modules = ();
-my %file_includes = ();
-my @check_includes = ();
-foreach $f ( @src ) {
-
- # Find the file in the search path (@file_paths).
- unless ($file_path = find_file($f)) {
- if (defined $opt{'w'}) {print STDERR "$f not found\n";}
- next;
- }
-
- # Find the module and include dependencies.
- ($rmods, $rincs) = find_dependencies( $file_path );
-
- # Remove redundancies (a file can contain multiple procedures that have
- # the same dependencies).
- $file_modules{$f} = rm_duplicates($rmods);
- $file_includes{$f} = rm_duplicates($rincs);
-
- # Make a list of all include files.
- push @check_includes, @{$file_includes{$f}};
-}
-
-#print STDERR "\%file_modules\n";
-#while ( ($k,$v) = each %file_modules ) {
-# print STDERR "$k => @$v\n";
-#}
-#print STDERR "\%file_includes\n";
-#while ( ($k,$v) = each %file_includes ) {
-# print STDERR "$k => @$v\n";
-#}
-#print STDERR "\@check_includes\n";
-#print STDERR "@check_includes\n";
-
-# Find include file dependencies.
-my %include_depends = ();
-while (@check_includes) {
- $f = shift @check_includes;
- if (defined($include_depends{$f})) { next; }
-
- # Mark files not in path so they can be removed from the dependency list.
- unless ($file_path = find_file($f)) {
- $include_depends{$f} = -1;
- next;
- }
-
- # Find include file dependencies.
- ($rmods, $include_depends{$f}) = find_dependencies($file_path);
-
- # Add included include files to the back of the check_includes list so
- # that their dependencies can be found.
- push @check_includes, @{$include_depends{$f}};
-
- # Add included modules to the include_depends list.
- if ( @$rmods ) { push @{$include_depends{$f}}, @$rmods; }
-}
-
-#print STDERR "\%include_depends\n";
-#while ( ($k,$v) = each %include_depends ) {
-# print STDERR (ref $v ? "$k => @$v\n" : "$k => $v\n");
-#}
-
-# Remove include file dependencies that are not in the Filepath.
-my ($i, $ii);
-foreach $f (keys %include_depends) {
-
- unless (ref $include_depends{$f}) { next; }
- $rincs = $include_depends{$f};
- unless (@$rincs) { next; }
- $ii = 0;
- $num_incs = @$rincs;
- for ($i = 0; $i < $num_incs; ++$i) {
- if ($include_depends{$$rincs[$ii]} == -1) {
- splice @$rincs, $ii, 1;
- next;
- }
- ++$ii;
- }
-}
-
-# Substitute the include file dependencies into the %file_includes lists.
-foreach $f (keys %file_includes) {
- my @expand_incs = ();
-
- # Initialize the expanded %file_includes list.
- my $i;
- unless (@{$file_includes{$f}}) { next; }
- foreach $i (@{$file_includes{$f}}) {
- push @expand_incs, $i unless ($include_depends{$i} == -1);
- }
- unless (@expand_incs) {
- $file_includes{$f} = [];
- next;
- }
-
- # Expand
- for ($i = 0; $i <= $#expand_incs; ++$i) {
- push @expand_incs, @{ $include_depends{$expand_incs[$i]} };
- }
-
- $file_includes{$f} = rm_duplicates(\@expand_incs);
-}
-
-#print STDERR "expanded \%file_includes\n";
-#while ( ($k,$v) = each %file_includes ) {
-# print STDERR "$k => @$v\n";
-#}
-
-# Print dependencies to STDOUT.
-foreach $f (sort keys %file_modules) {
- $f =~ /(.+)\./;
- $target = "$1.o";
- if ( defined $opt{'t'} ) { $target = "$opt{'t'}/$1.o"; }
- print "$target : $f @{$file_modules{$f}} @{$file_includes{$f}}\n";
-}
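-
-# Example output line (illustrative):
-#   fileutils.o : fileutils.F90 shr_file_mod.o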
-
-#--------------------------------------------------------------------------------------
-
-sub find_dependencies {
-
- # Find dependencies of input file.
- # Use'd Fortran 90 modules are returned in \@mods.
- # Files that are "#include"d by the cpp preprocessor are returned in \@incs.
-
- my( $file ) = @_;
- my( @mods, @incs );
-
- open(FH, $file) or die "Can't open $file: $!\n";
-
- while ( <FH> ) {
- # Search for "#include" and strip filename when found.
- if ( /^#include\s+[<"](.*)[>"]/ ) {
- push @incs, $1;
- }
- # Search for Fortran include dependencies.
- elsif ( /^\s*include\s+['"](.*)['"]/ ) { #" for emacs fontlock
- push @incs, $1;
- }
- # Search for module dependencies.
- elsif ( /^\s*USE\s+(\w+)/i ) {
- ($module = $1) =~ tr/a-z/A-Z/;
- # Return dependency in the form of a .o version of the file that contains
- # the module. this is from the source list.
- if ( defined $module_files{$module} ) {
- if ( defined $obj_dir ) {
- push @mods, "$obj_dir/$module_files{$module}.o";
- } else {
- push @mods, "$module_files{$module}.o";
- }
- }
- # Return dependency in the form of a .mod version of the file that contains
- # the module. this is from the .mod list. only if .o version not found
- elsif ( defined $trumod_files{$module} ) {
- if ( defined $obj_dir ) {
- push @mods, "$obj_dir/$trumod_files{$module}.mod";
- } else {
- push @mods, "$trumod_files{$module}.mod";
- }
- }
- }
- }
- close( FH );
- return (\@mods, \@incs);
-}
-
-#--------------------------------------------------------------------------------------
-
-sub find_file {
-
-# Search for the specified file in the list of directories in the global
-# array @file_paths. Return the first occurrence found, or the null string if
-# the file is not found.
-
- my($file) = @_;
- my($dir, $fname);
-
- foreach $dir (@file_paths) {
- $fname = "$dir/$file";
- if ( -f $fname ) { return $fname; }
- }
- return ''; # file not found
-}
-
-#--------------------------------------------------------------------------------------
-
-sub rm_duplicates {
-
-# Return a list with duplicates removed.
-
- my ($in) = @_; # input array reference
- my @out = ();
- my $i;
- my %h = ();
- foreach $i (@$in) {
- $h{$i} = '';
- }
- @out = keys %h;
- return \@out;
-}
-
-#--------------------------------------------------------------------------------------
-
-sub usage {
- ($ProgName = $0) =~ s!.*/!!; # name of program
-  die <<EOF
-SYNOPSIS
-     $ProgName [-t dir] [-w] Filepath Srcfiles
-OPTIONS
-     -t dir   Target directory: the .o files in the dependency rules take
-              the form dir/file.o.
-     -w       Print warnings to STDERR about files or dependencies not found.
-EOF
-}
diff --git a/tools/mkprocdata_map/src/constMod.F90 b/tools/mkprocdata_map/src/constMod.F90
deleted file mode 100644
--- a/tools/mkprocdata_map/src/constMod.F90
+++ /dev/null
-module constMod
-
-  use shr_kind_mod, only : r8 => shr_kind_r8
-
- implicit none
- save
-
- real(R8),parameter :: SHR_CONST_REARTH = 6.37122e6_R8 ! radius of earth ~ m
- real(r8),parameter :: re_km = SHR_CONST_REARTH*0.001 ! radius of earth (km)
-
-end module constMod
diff --git a/tools/mkprocdata_map/src/fileutils.F90 b/tools/mkprocdata_map/src/fileutils.F90
deleted file mode 100644
index e1f8e633da..0000000000
--- a/tools/mkprocdata_map/src/fileutils.F90
+++ /dev/null
@@ -1,282 +0,0 @@
-module fileutils
-
-!-----------------------------------------------------------------------
-!BOP
-!
-! !MODULE: fileutils
-!
-! !DESCRIPTION:
-! Module containing file I/O utilities
-!
-! !USES:
-!
-! !PUBLIC TYPES:
- implicit none
- save
-!
-! !PUBLIC MEMBER FUNCTIONS:
- public :: get_filename !Returns filename given full pathname
- public :: opnfil !Open local unformatted or formatted file
- public :: getfil !Obtain local copy of file
- public :: relavu !Close and release Fortran unit no longer in use
- public :: getavu !Get next available Fortran unit number
-!
-! !REVISION HISTORY:
-! Created by Mariana Vertenstein
-!
-!
-! !PRIVATE MEMBER FUNCTIONS: None
-!EOP
-!-----------------------------------------------------------------------
-
-contains
-
-!-----------------------------------------------------------------------
-!BOP
-!
-! !IROUTINE: get_filename
-!
-! !INTERFACE:
- character(len=256) function get_filename (fulpath)
-!
-! !DESCRIPTION:
-! Returns filename given full pathname
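-! For example (illustrative):
-!   get_filename('/glade/scratch/case/hist.nc') returns 'hist.nc'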
-!
-! !ARGUMENTS:
- implicit none
- character(len=*), intent(in) :: fulpath !full pathname
-!
-! !REVISION HISTORY:
-! Created by Mariana Vertenstein
-!
-!
-! !LOCAL VARIABLES:
-!EOP
- integer i !loop index
- integer klen !length of fulpath character string
-!------------------------------------------------------------------------
-
- klen = len_trim(fulpath)
- do i = klen, 1, -1
- if (fulpath(i:i) == '/') go to 10
- end do
- i = 0
-10 get_filename = fulpath(i+1:klen)
-
- end function get_filename
-
-!------------------------------------------------------------------------
-!BOP
-!
-! !IROUTINE: set_filename
-!
-! !INTERFACE:
- character(len=256) function set_filename (rem_dir, loc_fn)
-!
-! !DESCRIPTION:
-!
-! !ARGUMENTS:
-!
- implicit none
- character(len=*), intent(in) :: rem_dir !remote directory
- character(len=*), intent(in) :: loc_fn !local full path filename
-!
-! !REVISION HISTORY:
-! Created by Mariana Vertenstein
-!
-!
-! !LOCAL VARIABLES:
-!EOP
- integer :: i !integer
-!------------------------------------------------------------------------
-
- set_filename = ' '
- do i = len_trim(loc_fn), 1, -1
- if (loc_fn(i:i)=='/') go to 10
- end do
- i = 0
-10 set_filename = trim(rem_dir) // loc_fn(i+1:len_trim(loc_fn))
-
- end function set_filename
-
-!------------------------------------------------------------------------
-!BOP
-!
-! !IROUTINE: getfil
-!
-! !INTERFACE:
- subroutine getfil (fulpath, locfn, iflag)
-!
-! !DESCRIPTION:
-! Obtain local copy of file
-! First check current working directory
-! Next check full pathname[fulpath] on disk
-! Finally check full pathname[fulpath] on archival system
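-! For example (illustrative), a call
-!   call getfil('/glade/inputdata/surfdata.nc', locfn)
-! sets locfn to 'surfdata.nc' if that file exists in the working directory,
-! otherwise to the full path if it exists on disk, and aborts if neither
-! is found.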
-!
-! !USES:
-!
-! !ARGUMENTS:
- implicit none
- character(len=*), intent(in) :: fulpath !Archival or permanent disk full pathname
- character(len=*), intent(out) :: locfn !output local file name
- integer, optional, intent(in) :: iflag !0=>abort if file not found 1=>do not abort
-!
-! !REVISION HISTORY:
-! Created by Mariana Vertenstein
-!
-!
-! !LOCAL VARIABLES:
-!EOP
- integer i !loop index
- integer klen !length of fulpath character string
- integer ierr !error status
- logical lexist !true if local file exists
- character(len=len(fulpath)+5) :: fulpath2 !Archival full pathname
-!------------------------------------------------------------------------
-
- ! get local file name from full name: start at end. look for first "/"
-
- klen = len_trim(fulpath)
- do i = klen, 1, -1
- if (fulpath(i:i).eq.'/') go to 100
- end do
- i = 0
-100 locfn = fulpath(i+1:klen)
- if (len_trim(locfn) == 0) then
- write(6,*)'(GETFIL): local filename has zero length'
- stop 1
- else
- write(6,*)'(GETFIL): attempting to find local file ',trim(locfn)
- endif
-
- ! first check if file is in current working directory.
-
- inquire (file=locfn,exist=lexist)
- if (lexist) then
- write(6,*) '(GETFIL): using ',trim(locfn),' in current working directory'
- RETURN
- endif
-
- ! second check for full pathname on disk
-
- inquire(file=fulpath, exist=lexist)
- if (lexist) then
- locfn = trim(fulpath)
- write(6,*) '(GETFIL): using ',trim(fulpath)
- RETURN
- else
- write(6,*) 'GETFIL: FAILED to get '//trim(fulpath)
- stop 1
- end if
-
- end subroutine getfil
-
-!------------------------------------------------------------------------
-!BOP
-!
-! !IROUTINE: opnfil
-!
-! !INTERFACE:
- subroutine opnfil (locfn, iun, form)
-!
-! !DESCRIPTION:
-! Open file locfn in unformatted or formatted form on unit iun
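-! For example (illustrative): call opnfil('output.txt', 10, 'f') opens
-! output.txt as a formatted file on unit 10.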
-!
-! !ARGUMENTS:
-!
- implicit none
- character(len=*), intent(in):: locfn !file name
- integer, intent(in):: iun !fortran unit number
- character(len=1), intent(in):: form !file format: u = unformatted,
- !f = formatted
-!
-! !REVISION HISTORY:
-! Created by Mariana Vertenstein
-!
-!
-! !LOCAL VARIABLES:
-!EOP
- integer ioe !error return from fortran open
- character(len=11) ft !format type: formatted or unformatted
-!------------------------------------------------------------------------
-
- if (len_trim(locfn) == 0) then
- write(6,*)'OPNFIL: local filename has zero length'
- stop 1
- endif
- if (form=='u' .or. form=='U') then
- ft = 'unformatted'
- else
- ft = 'formatted '
- end if
- open (unit=iun,file=locfn,status='unknown',form=ft,iostat=ioe)
- if (ioe /= 0) then
- write(6,*)'(OPNFIL): failed to open file ',trim(locfn), &
- & ' on unit ',iun,' ierr=',ioe
- stop 1
- else
- write(6,*)'(OPNFIL): Successfully opened file ',trim(locfn),' on unit= ',iun
- end if
-
- end subroutine opnfil
-
-!------------------------------------------------------------------------
-!BOP
-!
-! !IROUTINE: getavu
-!
-! !INTERFACE:
- integer function getavu()
-!
-! !DESCRIPTION:
-! Get next available Fortran unit number.
-!
-! !USES:
- use shr_file_mod, only : shr_file_getUnit
-!
-! !ARGUMENTS:
- implicit none
-!
-! !REVISION HISTORY:
-! Created by Gordon Bonan
-! Modified for clm2 by Mariana Vertenstein
-!
-!
-! !LOCAL VARIABLES:
-!EOP
-!------------------------------------------------------------------------
-
- getavu = shr_file_getunit()
-
- end function getavu
-
-!------------------------------------------------------------------------
-!BOP
-!
-! !IROUTINE: relavu
-!
-! !INTERFACE:
- subroutine relavu (iunit)
-!
-! !DESCRIPTION:
-! Close and release Fortran unit no longer in use!
-!
-! !USES:
- use shr_file_mod, only : shr_file_freeUnit
-!
-! !ARGUMENTS:
- implicit none
- integer, intent(in) :: iunit !Fortran unit number
-!
-! !REVISION HISTORY:
-! Created by Gordon Bonan
-!
-!EOP
-!------------------------------------------------------------------------
-
- close(iunit)
- call shr_file_freeUnit(iunit)
-
- end subroutine relavu
-
-end module fileutils
diff --git a/tools/mkprocdata_map/src/fmain.F90 b/tools/mkprocdata_map/src/fmain.F90
deleted file mode 100644
index ba9e593c1d..0000000000
--- a/tools/mkprocdata_map/src/fmain.F90
+++ /dev/null
@@ -1,78 +0,0 @@
-program fmain
-
- use mkprocdata_map, only : mkmap
- implicit none
-
- character(len= 256) :: arg
- integer :: n !index
- integer :: nargs !number of arguments
- integer, external :: iargc !number of arguments function
- character(len=256) :: filei !input file
- character(len=256) :: fileo !output mapped file
- character(len=256) :: fmap !mapping file
- character(len=256) :: ftemplate !template file, containing lat & lon arrays desired in output file
- character(len=256) :: cmdline !input command line
- integer, parameter :: inival = -999 !initial value for command-line integers
- !----------------------------------------------------
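-
- ! Example command line (file names are illustrative):
- !   mkprocdata_map -i hist_1d.nc -o hist_2d.nc -m map_1d_to_2d.nc -t template_2d.nc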
-
- filei = ' '
- fileo = ' '
- fmap = ' '
- ftemplate = ' '
-
- cmdline = 'mkprocdata_map'
- nargs = iargc()
- n = 1
- do while (n <= nargs)
- arg = ' '
- call getarg (n, arg)
- n = n + 1
-
- select case (arg)
- case ('-i')
- call getarg (n, arg)
- n = n + 1
- filei = trim(arg)
- cmdline = trim(cmdline) // ' -i ' // trim(arg)
- case ('-o')
- call getarg (n, arg)
- n = n + 1
- fileo = trim(arg)
- cmdline = trim(cmdline) // ' -o ' // trim(arg)
- case ('-m')
- call getarg (n, arg)
- n = n + 1
- fmap = trim(arg)
- cmdline = trim(cmdline) // ' -m ' // trim(arg)
- case ('-t')
- call getarg (n, arg)
- n = n + 1
- ftemplate = trim(arg)
- cmdline = trim(cmdline) // ' -t ' // trim(arg)
- case default
- write (6,*) 'Argument ', arg,' is not known'
- cmdline = trim(cmdline) // ' ' // trim(arg)
- call usage_exit (' ')
- end select
- end do
-
- if (filei == ' ' .or. fileo == ' ' .or. fmap == ' ' &
- .or. ftemplate == ' ') then
- call usage_exit ('Must specify all the following arguments')
- end if
-
- call mkmap (filei, fileo, fmap, ftemplate)
-
-end program fmain
-
-
-subroutine usage_exit (arg)
- implicit none
- character(len=*) :: arg
- if (arg /= ' ') write (6,*) arg
- write (6,*) 'Usage: mkprocdata_map -i <input file> -o