#!/usr/bin/env python3
"""
|------------------------------------------------------------------|
|---------------------  Instructions  -----------------------------|
|------------------------------------------------------------------|
This script is a simple wrapper for PLUMBER2 sites that performs the
following:
    1) For each PLUMBER2 site listed in PLUMBER2_sites.csv, subsets a
       single-point surface dataset from the global dataset
       (i.e. ./subset_data point ...), overwriting the vegetation with
       the dominant PFT(s) read from the csv for that site.

Instructions for running using conda python environments:

../../py_env_create
conda activate ctsm_py

"""
# Import libraries
import argparse
import logging
import subprocess

# tqdm only draws a progress bar; fall back gracefully when it is not
# installed so the script remains usable without it.
try:
    import tqdm as _tqdm
except ImportError:
    _tqdm = None

import pandas as pd


def get_parser():
    """
    Build the command-line parser for this script.

    Returns:
        argparse.ArgumentParser: parser accepting -v/--verbose and the
        --16pft/--78pft pair of surface-dataset flags (dest ``pft_16``).
    """
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )

    parser.print_usage = parser.print_help

    parser.add_argument(
        "-v",
        "--verbose",
        help="Verbose mode will print more information. ",
        action="store_true",
        dest="verbose",
        default=False,
    )

    # 16-PFT datasets are the default. The original flag was a no-op
    # (store_true with default=True left no way to request 78 PFTs);
    # --78pft below is the complementary switch, while --16pft is kept
    # for backward compatibility. Both share the same dest.
    parser.add_argument(
        "--16pft",
        help="Create and/or modify 16-PFT surface datasets (e.g. for a FATES run) ",
        action="store_true",
        dest="pft_16",
        default=True,
    )

    parser.add_argument(
        "--78pft",
        help="Create and/or modify 78-PFT (crop) surface datasets ",
        action="store_false",
        dest="pft_16",
    )

    return parser


def execute(command):
    """
    Run a command in a subprocess, discarding its output.

    Args:
        command (list of str):
            command and arguments that we want to run.

    A failure is printed but not raised, so one bad site does not stop
    the loop over the remaining sites.
    """
    print("\n", " >> ", *command, "\n")

    try:
        # subprocess.DEVNULL avoids the file-handle leak of
        # open(os.devnull, "w") with no matching close().
        subprocess.check_call(command, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)

    except subprocess.CalledProcessError as err:
        # Report and continue; the remaining sites are still processed.
        print(err)


def main():
    """
    Read plumber2_sites from csv, iterate through sites, and add dominant PFT
    """

    args = get_parser().parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    plumber2_sites = pd.read_csv("PLUMBER2_sites.csv", skiprows=4)

    # Progress bar is cosmetic; iterate plainly when tqdm is missing.
    site_rows = plumber2_sites.iterrows()
    if _tqdm is not None:
        site_rows = _tqdm.tqdm(site_rows)

    for _, row in site_rows:
        lat = row["Lat"]
        lon = row["Lon"]
        site = row["Site"]
        pft1 = row["pft1"]
        pctpft1 = row["pft1-%"]
        cth1 = row["pft1-cth"]
        cbh1 = row["pft1-cbh"]
        pft2 = row["pft2"]
        pctpft2 = row["pft2-%"]
        cth2 = row["pft2-cth"]
        cbh2 = row["pft2-cbh"]
        # overwrite missing values (-999 sentinel) from .csv file
        if pft1 == -999:
            pft1 = 0
            pctpft1 = 0
            cth1 = 0
            cbh1 = 0
        if pft2 == -999:
            pft2 = 0
            pctpft2 = 0
            cth2 = 0
            cbh2 = 0
        clmsite = "1x1_PLUMBER2_" + site
        print("Now processing site :", site)

        if args.pft_16:
            # use surface dataset with 16 pfts, but overwrite to 100% 1 dominant PFT
            # don't set crop flag
            # set dominant pft
            subset_command = [
                "./subset_data",
                "point",
                "--lat",
                str(lat),
                "--lon",
                str(lon),
                "--site",
                clmsite,
                "--dompft",
                str(pft1),
                str(pft2),
                "--pctpft",
                str(pctpft1),
                str(pctpft2),
                "--cth",
                str(cth1),
                str(cth2),
                "--cbh",
                str(cbh1),
                str(cbh2),
                "--create-surface",
                "--uniform-snowpack",
                "--cap-saturation",
                "--verbose",
                "--overwrite",
            ]
        else:
            # use surface dataset with 78 pfts, and overwrite to 100% 1 dominant PFT
            # NOTE: FATES will currently not run with a 78-PFT surface dataset
            # set crop flag
            # set dominant pft
            subset_command = [
                "./subset_data",
                "point",
                "--lat",
                str(lat),
                "--lon",
                str(lon),
                "--site",
                clmsite,
                "--crop",
                "--dompft",
                str(pft1),
                str(pft2),
                "--pctpft",
                str(pctpft1),
                str(pctpft2),
                "--create-surface",
                "--uniform-snowpack",
                "--cap-saturation",
                "--verbose",
                "--overwrite",
            ]
        execute(subset_command)


if __name__ == "__main__":
    main()
#!/usr/bin/env python3

"""

Reads in .csv files with PLUMBER2 site information
Creates individual usermod_dirs for each PLUMBER2 site with shell_commands

"""

# Import libraries
import os

# tqdm only draws a progress bar; fall back gracefully when it is not
# installed so the script remains usable without it.
try:
    import tqdm as _tqdm
except ImportError:
    _tqdm = None

import pandas as pd


# Big ugly function to create usermod_dirs for each site
def write_usermods(
    lat, lon, site, start_year, end_year, start_date, start_year_actual, start_tod, atm_ncpl, stop_n
):
    """
    Create (or refresh) the usermods directory for one PLUMBER2 site.

    Writes two files under ../../cime_config/usermods_dirs/PLUMBER2/<site>:
      * include_user_mods - points at the shared ../defaults directory
      * shell_commands    - xmlchange / user_nl edits specific to this site

    Args:
        lat, lon: site coordinates written via PTS_LAT / PTS_LON.
        site (str): PLUMBER2 site code (e.g. "US-Los").
        start_year, end_year: forcing-data year range.
        start_date: RUN_STARTDATE used for transient (HIST) cases.
        start_year_actual: first calendar year, for DATM alignment in
            transient cases.
        start_tod: START_TOD (seconds into the start day).
        atm_ncpl: ATM_NCPL (atmosphere couplings per day).
        stop_n: STOP_N (run length in years) for transient cases.
    """

    site_dir = os.path.join("../../cime_config/usermods_dirs/PLUMBER2/", site)

    if not os.path.isdir(site_dir):
        os.makedirs(site_dir, exist_ok=True)

    # create files in each directory; "with" guarantees the handles are
    # closed even if a write fails part-way through
    include = os.path.join(site_dir, "include_user_mods")
    with open(include, "w") as i_file:
        i_file.write("../defaults")

    # pylint: disable=anomalous-backslash-in-string
    # NOTE: \$ is intentional -- the literal backslash-dollar survives into
    # the generated shell file so the shell expands $DIN_LOC_ROOT later.
    lai_stream = (
        "\$DIN_LOC_ROOT/lnd/clm2/lai_streams/PLUMBER2/"
        + site
        + "/LAI_stream_"
        + site
        + "_"
        + str(start_year)
        + "-"
        + str(end_year)
        + ".nc"
    )
    shell = os.path.join(site_dir, "shell_commands")
    with open(shell, "w") as s_file:
        # pylint: disable=line-too-long
        s_file.write(
            # TODO turn on following line after cdeps changes are added
            #'./xmlchange PLUMBER2SITE='+site + '\n' \
            "./xmlchange PTS_LON=" + str(lon) + "\n"
            "./xmlchange PTS_LAT=" + str(lat) + "\n"
            "./xmlchange DATM_YR_END=" + str(end_year) + "\n"
            "./xmlchange START_TOD=" + str(start_tod) + "\n"
            "./xmlchange ATM_NCPL=" + str(atm_ncpl) + "\n"
            "\n"  # TODO, get working for CTSM5.1:
            # remove the above line as it's redundant after PLUMBER2SITE is added
            # Alternatively, we can take this out of default/user_nl_clm
            # since doing it this way works fine; TODO for 5.2
            "echo \"fsurdat='/glade/u/home/wwieder/CTSM/tools/site_and_regional/subset_data_single_point/surfdata_1x1_PLUMBER2_"
            + site
            + "_hist_16pfts_Irrig_CMIP6_simyr2000_c231005.nc ' \" >> user_nl_clm \n"
            'echo "CLM_USRDAT.PLUMBER2:datafiles= \$DIN_LOC_ROOT/atm/datm7/CLM1PT_data/PLUMBER2/'
            + site
            + "/CLM1PT_data/CTSM_DATM_"
            + site
            + "_"
            + str(start_year)
            + "-"
            + str(end_year)
            + '.nc " >> user_nl_datm_streams \n'
            'echo "presaero.SSP3-7.0:year_first=' + str(start_year) + '" >> user_nl_datm_streams \n'
            'echo "presaero.SSP3-7.0:year_last=' + str(end_year) + '" >> user_nl_datm_streams \n'
            'echo "presaero.SSP3-7.0:year_align=' + str(start_year) + '" >> user_nl_datm_streams \n'
            "\n"
            'echo "presndep.SSP3-7.0:year_first=' + str(start_year) + '" >> user_nl_datm_streams \n'
            'echo "presndep.SSP3-7.0:year_last=' + str(end_year) + '" >> user_nl_datm_streams \n'
            'echo "presndep.SSP3-7.0:year_align=' + str(start_year) + '" >> user_nl_datm_streams \n'
            "\n"
            'echo "co2tseries.SSP3-7.0:year_first=' + str(start_year) + '" >> user_nl_datm_streams \n'
            'echo "co2tseries.SSP3-7.0:year_last=' + str(end_year) + '" >> user_nl_datm_streams \n'
            'echo "co2tseries.SSP3-7.0:year_align=' + str(start_year) + '" >> user_nl_datm_streams \n'
            "\n"
            "compset=`./xmlquery COMPSET --value` \n"
            "CLM_USRDAT_NAME=`./xmlquery CLM_USRDAT_NAME --value` \n"
            "TEST=`./xmlquery TEST --value` \n"
            "\n"
            "# For a transient case run the whole length and do not cycle \n"
            "if [[ $compset =~ ^HIST ]]; then \n"
            "  # Number of years that can be run for the full transient case \n"
            '  if [[ $TEST != "TRUE" ]]; then \n'
            "    ./xmlchange STOP_N=" + str(stop_n) + "\n"
            "  fi \n"
            "  # set start date for transient case with historical compset \n"
            "  ./xmlchange RUN_STARTDATE=" + str(start_date) + "\n"
            "  ./xmlchange DATM_YR_ALIGN=" + str(start_year_actual) + "\n"
            "  ./xmlchange DATM_YR_START=" + str(start_year_actual) + "\n"
            "else \n"
            "  # for spinup case with I2000 compset \n"
            "  ./xmlchange RUN_STARTDATE=0001-01-01" + "\n"
            "  ./xmlchange DATM_YR_ALIGN=" + str(1) + "\n"
            "  ./xmlchange DATM_YR_START=" + str(start_year) + "\n"
            "fi \n"
            "\n"
            "# Turn on LAI streams for a SP case \n"
            "if [[ $compset =~ .*CLM[0-9]+%[^_]*SP.* ]]; then \n"
            "  echo \"stream_fldfilename_lai='" + lai_stream + "'\" >> user_nl_clm \n"
            '  echo "stream_year_last_lai=' + str(end_year) + '" >> user_nl_clm \n'
            "  if [[ $compset =~ ^HIST ]]; then \n"
            "    # for transient case with a historical compset \n"
            '    echo "model_year_align_lai=' + str(start_year_actual) + '" >> user_nl_clm \n'
            '    echo "stream_year_first_lai=' + str(start_year_actual) + '" >> user_nl_clm \n'
            "  else \n"
            "    # for a spinup case with a i2000 compset \n"
            '    echo "model_year_align_lai=1" >> user_nl_clm \n'
            '    echo "stream_year_first_lai=' + str(start_year) + '" >> user_nl_clm \n'
            "  fi \n"
            "fi \n"
            "\n"
        )
        # pylint: enable=line-too-long, anomalous-backslash-in-string

    # add baseflow_scalar = 0 to user_nl_clm for wetland sites
    wetland = [
        "CZ-wet",
        "DE-SfN",
        "FI-Kaa",
        "FI-Lom",
        "RU-Che",
        "SE-Deg",
        "US-Los",
        "US-Myb",
        "US-Tw4",
        "PL-wet",
    ]
    if site in wetland:
        # append to the shell_commands file written above
        with open(shell, "a") as s_file:
            s_file.write(
                "\n"
                "# set baseflow scalar to zero for wetland site \n"
                'echo "baseflow_scalar = 0" >> user_nl_clm'
            )


# End write_usermods function


def main():
    """
    Iterate through plumber2 sites and create usermod_dirs
    """

    # For now we can just run the 'main' program as a loop
    plumber2_sites = pd.read_csv("PLUMBER2_sites.csv", skiprows=4)

    # Progress bar is cosmetic; iterate plainly when tqdm is missing.
    site_rows = plumber2_sites.iterrows()
    if _tqdm is not None:
        site_rows = _tqdm.tqdm(site_rows)

    for _, row in site_rows:
        lat = row["Lat"]
        lon = row["Lon"]
        site = row["Site"]
        start_year = row["start_year"]
        end_year = row["end_year"]
        start_date = row["RUN_STARTDATE"]
        # first four characters of RUN_STARTDATE are the calendar year
        start_year_actual = start_date[:4]
        start_tod = row["START_TOD"]
        atm_ncpl = row["ATM_NCPL"]
        # inclusive year count: e.g. 2000..2004 -> STOP_N = 5
        stop_n = 1 + end_year - start_year

        write_usermods(
            lat,
            lon,
            site,
            start_year,
            end_year,
            start_date,
            start_year_actual,
            start_tod,
            atm_ncpl,
            stop_n,
        )


if __name__ == "__main__":
    main()
#!/usr/bin/env python3
"""
This is just a top-level skeleton script that calls
plumber2_usermods.py.
The original code (plumber2_usermods.py) is located under the
python/ctsm/site_and_regional folder.

For full instructions on how to run the code and different options,
please check the python/ctsm/site_and_regional/plumber2_usermods.py file.

This script is a simple wrapper for PLUMBER2 sites that creates
usermod_dirs for each site.

----------------------------------------------------------------
Instructions for running using conda python environments:
../../py_env_create
conda activate ctsm_pylib
"""

import os
import sys

# -- add python/ctsm to path so the packaged implementation is importable
# regardless of the caller's working directory
_CTSM_PYTHON = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python"
)
sys.path.insert(1, _CTSM_PYTHON)

# the import must follow the sys.path manipulation above
# pylint: disable=wrong-import-position
from ctsm.site_and_regional.plumber2_usermods import main

if __name__ == "__main__":
    main()
modify_singlept_site_neon.py) - -Instructions for running using conda python environments: - -../../py_env_create -conda activate ctsm_py - -""" -# Import libraries -from __future__ import print_function - -import os -import sys -import tqdm -import logging -import argparse -import subprocess - -import pandas as pd - - - - -def get_parser(): - """ - Get parser object for this script. - """ - parser = argparse.ArgumentParser(description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - - parser.print_usage = parser.print_help - - parser.add_argument('-v','--verbose', - help='Verbose mode will print more information. ', - action="store_true", - dest="verbose", - default=False) - - parser.add_argument('--16pft', - help='Create and/or modify 16-PFT surface datasets (e.g. for a FATES run) ', - action="store_true", - dest="pft_16", - default=True) - - return parser - - -def execute(command): - """ - Function for running a command on shell. - Args: - command (str): - command that we want to run. - Raises: - Error with the return code from shell. 
- """ - print ('\n',' >> ',*command,'\n') - - try: - subprocess.check_call(command, stdout=open(os.devnull, "w"), stderr=subprocess.STDOUT) - - except subprocess.CalledProcessError as e: - #raise RuntimeError("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output)) - #print (e.ouput) - print (e) - - - - - - -def main(): - - args = get_parser().parse_args() - - if args.verbose: - logging.basicConfig(level=logging.DEBUG) - - - plumber2_sites = pd.read_csv('PLUMBER2_sites.csv', skiprows=4) - - for i, row in tqdm.tqdm(plumber2_sites.iterrows()): - lat = row['Lat'] - lon = row['Lon'] - site = row['Site'] - pft1 = row['pft1'] - pctpft1 = row['pft1-%'] - cth1=row['pft1-cth'] - cbh1=row['pft1-cbh'] - pft2 = row['pft2'] - pctpft2 = row['pft2-%'] - cth2=row['pft2-cth'] - cbh2=row['pft2-cbh'] - # overwrite missing values from .csv file - if pft1 == -999: - pft1 = 0 - pctpft1 = 0 - cth1 = 0 - cbh1 = 0 - if pft2 == -999: - pft2 = 0 - pctpft2 = 0 - cth2 = 0 - cbh2 = 0 - clmsite = "1x1_PLUMBER2_"+site - print ("Now processing site :", site) - - if args.pft_16: - # use surface dataset with 16 pfts, but overwrite to 100% 1 dominant PFT - # don't set crop flag - # set dominant pft - subset_command = ['./subset_data','point','--lat',str(lat),'--lon',str(lon), - '--site',clmsite,'--dompft',str(pft1),str(pft2), - '--pctpft', str(pctpft1),str(pctpft2), - '--cth', str(cth1),str(cth2), - '--cbh', str(cbh1),str(cbh2), - '--create-surface', - '--uniform-snowpack','--cap-saturation','--verbose','--overwrite'] - else: - # use surface dataset with 78 pfts, and overwrite to 100% 1 dominant PFT - # NOTE: FATES will currently not run with a 78-PFT surface dataset - # set crop flag - # set dominant pft - subset_command = ['./subset_data', 'point', '--lat', str(lat), '--lon', str(lon), - '--site', clmsite,'--crop', '--dompft', str(pft1),str(pft2), - '--pctpft', str(pctpft1),str(pctpft2), '--create-surface', - '--uniform-snowpack', '--cap-saturation', '--verbose', 
'--overwrite'] - execute(subset_command) - -if __name__ == "__main__": - main() - diff --git a/tools/site_and_regional/plumber2_usermods.py b/tools/site_and_regional/plumber2_usermods.py deleted file mode 100644 index c12e539c31..0000000000 --- a/tools/site_and_regional/plumber2_usermods.py +++ /dev/null @@ -1,149 +0,0 @@ -#! /usr/bin/env python3 - -""" - -Reads in .csv files with PLUMBER2 site information -Creates individual usermod_dirs for each PLUMBER2 site with shell_commands - -""" - -# Import libraries -from __future__ import print_function - -import os -import sys -import tqdm -import logging -import subprocess - -import pandas as pd - - -# Big ugly function to create usermod_dirs for each site -def write_usermods(lat,lon,site,start_year,end_year, - start_date,start_year_actual,start_tod,atm_ncpl,stop_n): - - site_dir = os.path.join('../../cime_config/usermods_dirs/PLUMBER2/',site) - - if not os.path.isdir(site_dir): - os.makedirs(site_dir, exist_ok=True) - - # create files in each directory - include = os.path.join(site_dir,'include_user_mods') - iFile = open(include, 'w') # or 'a' to add text instead of truncate - iFile.write('../defaults') - iFile.close() - - LAIstream = '\$DIN_LOC_ROOT/lnd/clm2/lai_streams/PLUMBER2/'+site+'/LAI_stream_'+site+'_'+ \ - str(start_year)+'-'+str(end_year)+'.nc' - shell = os.path.join(site_dir,'shell_commands') - sFile = open(shell, 'w') # or 'a' to add text instead of truncate - sFile.write( - #TODO turn on following line after cdeps changes are added - #'./xmlchange PLUMBER2SITE='+site + '\n' \ - './xmlchange PTS_LON='+str(lon) + '\n' \ - './xmlchange PTS_LAT='+str(lat) + '\n' \ - './xmlchange DATM_YR_END='+str(end_year) + '\n' \ - './xmlchange START_TOD='+str(start_tod) + '\n' \ - './xmlchange ATM_NCPL='+str(atm_ncpl) + '\n' \ - '\n' \ - # TODO, get working for CTSM5.1, remove this line as it's redundant after PLUMBER2SITE is added - # Alternatively, we can take this out of default/user_nl_clm since doing it this way is 
works fine TODO for 5.2 - 'echo "fsurdat=\'/glade/u/home/wwieder/CTSM/tools/site_and_regional/subset_data_single_point/surfdata_1x1_PLUMBER2_'+site+'_hist_16pfts_Irrig_CMIP6_simyr2000_c231005.nc \' " >> user_nl_clm \n' \ - - 'echo "CLM_USRDAT.PLUMBER2:datafiles= \$DIN_LOC_ROOT/atm/datm7/CLM1PT_data/PLUMBER2/'+site+'/CLM1PT_data/CTSM_DATM_'+site+'_'+str(start_year)+'-'+str(end_year)+'.nc " >> user_nl_datm_streams \n' \ - - 'echo "presaero.SSP3-7.0:year_first='+str(start_year) + '" >> user_nl_datm_streams \n' \ - 'echo "presaero.SSP3-7.0:year_last='+str(end_year) + '" >> user_nl_datm_streams \n' \ - 'echo "presaero.SSP3-7.0:year_align='+str(start_year) + '" >> user_nl_datm_streams \n' \ - '\n' \ - - 'echo "presndep.SSP3-7.0:year_first='+str(start_year) + '" >> user_nl_datm_streams \n' \ - 'echo "presndep.SSP3-7.0:year_last='+str(end_year) + '" >> user_nl_datm_streams \n' \ - 'echo "presndep.SSP3-7.0:year_align='+str(start_year) + '" >> user_nl_datm_streams \n' \ - '\n' \ - - 'echo "co2tseries.SSP3-7.0:year_first='+str(start_year) + '" >> user_nl_datm_streams \n' \ - 'echo "co2tseries.SSP3-7.0:year_last='+str(end_year) + '" >> user_nl_datm_streams \n' \ - 'echo "co2tseries.SSP3-7.0:year_align='+str(start_year) + '" >> user_nl_datm_streams \n' \ - '\n' \ - - 'compset=`./xmlquery COMPSET --value` \n' \ - 'CLM_USRDAT_NAME=`./xmlquery CLM_USRDAT_NAME --value` \n' \ - 'TEST=`./xmlquery TEST --value` \n' \ - '\n' \ - - '# For a transient case run the whole length and do not cycle \n' \ - 'if [[ $compset =~ ^HIST ]]; then \n' \ - ' # Number of years that can be run for the full transient case \n' \ - ' if [[ $TEST != "TRUE" ]]; then \n' \ - ' ./xmlchange STOP_N='+str(stop_n) + '\n' \ - ' fi \n' \ - ' # set start date for transient case with historical compset \n' \ - ' ./xmlchange RUN_STARTDATE='+str(start_date) + '\n' \ - ' ./xmlchange DATM_YR_ALIGN='+str(start_year_actual) + '\n' \ - ' ./xmlchange DATM_YR_START='+str(start_year_actual) + '\n' \ - 'else \n' \ - ' # for 
spinup case with I2000 compset \n' \ - ' ./xmlchange RUN_STARTDATE=0001-01-01' + '\n' \ - ' ./xmlchange DATM_YR_ALIGN='+str(1) + '\n' \ - ' ./xmlchange DATM_YR_START='+str(start_year) + '\n' \ - 'fi \n' \ - '\n' \ - - '# Turn on LAI streams for a SP case \n' \ - 'if [[ $compset =~ .*CLM[0-9]+%[^_]*SP.* ]]; then \n' \ - ' echo "stream_fldfilename_lai=\''+LAIstream+'\'" >> user_nl_clm \n' \ - ' echo "stream_year_last_lai='+str(end_year) + '" >> user_nl_clm \n' \ - ' if [[ $compset =~ ^HIST ]]; then \n' \ - ' # for transient case with a historical compset \n' \ - ' echo "model_year_align_lai='+str(start_year_actual) + '" >> user_nl_clm \n' \ - ' echo "stream_year_first_lai='+str(start_year_actual) + '" >> user_nl_clm \n' \ - ' else \n' \ - ' # for a spinup case with a i2000 compset \n' \ - ' echo "model_year_align_lai=1" >> user_nl_clm \n' \ - ' echo "stream_year_first_lai='+str(start_year) + '" >> user_nl_clm \n' \ - ' fi \n' \ - 'fi \n' - '\n' \ - - ) - - sFile.close() - - # add baseflow_scalar = 0 to user_nl_clm for wetland sites - wetland = ["CZ-wet","DE-SfN","FI-Kaa","FI-Lom","RU-Che", \ - "SE-Deg","US-Los","US-Myb","US-Tw4","PL-wet"] - if any(x == site for x in wetland): - sFile = open(shell, 'a') # or 'a' to add text instead of truncate - sFile.write( - '\n' \ - '# set baseflow scalar to zero for wetland site \n' \ - 'echo "baseflow_scalar = 0" >> user_nl_clm' - ) - sFile.close() - -# End write_usermods function - -def main(): - # For now we can just run the 'main' program as a loop - plumber2_sites = pd.read_csv('PLUMBER2_sites.csv', skiprows=4) - - for i, row in tqdm.tqdm(plumber2_sites.iterrows()): - lat = row['Lat'] - lon = row['Lon'] - site = row['Site'] - start_year = row['start_year'] - end_year = row['end_year'] - start_date = row['RUN_STARTDATE'] - start_year_actual = start_date[:4] - start_tod = row['START_TOD'] - atm_ncpl = row['ATM_NCPL'] - stop_n = 1+end_year-start_year - - write_usermods(lat,lon,site,start_year,end_year, - 
start_date,start_year_actual,start_tod,atm_ncpl,stop_n) - -if __name__ == "__main__": - main() -