From abf29681a53903478c07808b0a92eceb84c98eee Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 12 Aug 2021 17:13:47 -0600 Subject: [PATCH 001/223] adding the skeleton file here... --- tools/site_and_regional/subset_data | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100755 tools/site_and_regional/subset_data diff --git a/tools/site_and_regional/subset_data b/tools/site_and_regional/subset_data new file mode 100755 index 0000000000..34399bdaea --- /dev/null +++ b/tools/site_and_regional/subset_data @@ -0,0 +1,19 @@ +#!/usr/bin/env python3 +""" +""" + +import os +import sys + +_CTSM_PYTHON = os.path.join(os.path.dirname(os.path.realpath(__file__)), + os.pardir, + os.pardir, + 'python') +#print (_CTSM_PYTHON) +sys.path.insert(1, _CTSM_PYTHON) + + +from ctsm.subset_data import main + +if __name__ == "__main__": + main() From efc8f9f4c6eeedf34dd7f7f3bcd5748b6c2497a4 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 17 Aug 2021 22:56:31 -0600 Subject: [PATCH 002/223] adding a module file for each class... suggested by Bill Sacks. --- python/ctsm/base_case.py | 127 ++++ python/ctsm/regional_case.py | 100 +++ python/ctsm/single_point_case.py | 244 +++++++ python/ctsm/subset_data.py | 1146 ++++++++++++++++++++++++++++++ 4 files changed, 1617 insertions(+) create mode 100644 python/ctsm/base_case.py create mode 100644 python/ctsm/regional_case.py create mode 100644 python/ctsm/single_point_case.py create mode 100755 python/ctsm/subset_data.py diff --git a/python/ctsm/base_case.py b/python/ctsm/base_case.py new file mode 100644 index 0000000000..eb4e48853c --- /dev/null +++ b/python/ctsm/base_case.py @@ -0,0 +1,127 @@ +import os +import numpy as np +import xarray as xr +import subprocess + +from datetime import date +from getpass import getuser + +myname = getuser() + +class BaseCase : + """ + Parent class to SinglePointCase and RegionalCase + + ... 
+
+    Attributes
+    ----------
+    create_domain : bool
+        flag for creating domain file
+    create_surfdata : bool
+        flag for creating surface dataset
+    create_landuse : bool
+        flag for creating landuse file
+    create_datm : bool
+        flag for creating DATM files
+
+    Methods
+    -------
+    create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim)
+        create 1d coordinate variables to enable sel() method
+
+    add_tag_to_filename(filename, tag)
+        add a tag and timetag to a filename ending with
+        [._]cYYMMDD.nc or [._]YYMMDD.nc
+    """
+    def __init__(self, create_domain, create_surfdata, create_landuse, create_datm):
+        self.create_domain = create_domain
+        self.create_surfdata = create_surfdata
+        self.create_landuse = create_landuse
+        self.create_datm = create_datm
+
+    def __str__(self):
+        return str(self.__class__) + '\n' + '\n'.join(
+            (str(item) + ' = ' + str(self.__dict__[item])
+             for item in sorted(self.__dict__)))
+
+    @staticmethod
+    def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim):
+        """
+        lon_varname : variable name that has 2d lon
+        lat_varname : variable name that has 2d lat
+        x_dim: dimension name in X -- lon
+        y_dim: dimension name in Y -- lat
+        """
+        print("Open file: " + filename)
+        f1 = xr.open_dataset(filename)
+
+        # create 1d coordinate variables to enable sel() method
+        lon0 = np.asarray(f1[lon_varname][0, :])
+        lat0 = np.asarray(f1[lat_varname][:, 0])
+        lon = xr.DataArray(lon0, name='lon', dims=x_dim, coords={x_dim: lon0})
+        lat = xr.DataArray(lat0, name='lat', dims=y_dim, coords={y_dim: lat0})
+
+        f2 = f1.assign({'lon': lon, 'lat': lat})
+
+        # reset_coords returns a new dataset, so keep the result
+        f2 = f2.reset_coords([lon_varname, lat_varname])
+        f1.close()
+        return f2
+
+    @staticmethod
+    def add_tag_to_filename(filename, tag):
+        """
+        Add a tag and replace the timetag of a filename.
+        Expects the file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc;
+        adds the tag just before that ending part
+        and changes the ending part to the current time tag.
+        """
+        basename = os.path.basename(filename)
+        cend = -10
+        if basename[cend] == "c":
+            cend = cend - 1
+        if (basename[cend] != ".") and (basename[cend] != "_"):
+            print("Trouble figuring out where to add tag to filename: " + filename)
+            os.abort()
+        today = date.today()
+        today_string = today.strftime("%y%m%d")
+        return basename[:cend] + "_" + tag + "_c" + today_string + '.nc'
+
+    def update_metadata(self, nc):
+        # update attributes
+        today = date.today()
+        today_string = today.strftime("%Y-%m-%d")
+
+        # get git hash
+        sha = self.get_git_sha()
+
+        nc.attrs['Created_on'] = today_string
+        nc.attrs['Created_by'] = myname
+        nc.attrs['Created_with'] = os.path.abspath(__file__) + " -- " + sha
+
+        # delete unrelated attributes if they exist
+        del_attrs = ['source_code', 'SVN_url', 'hostname', 'history',
+                     'History_Log', 'Logname', 'Host', 'Version',
+                     'Compiler_Optimized']
+        attr_list = nc.attrs
+
+        for attr in del_attrs:
+            if attr in attr_list:
+                del nc.attrs[attr]
+
+    @staticmethod
+    def get_git_sha():
+        """
+        Returns Git short SHA for the current directory.
+ """ + try: + sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode() + except subprocess.CalledProcessError: + sha = "NOT-A-GIT-REPOSITORY" + return sha + diff --git a/python/ctsm/regional_case.py b/python/ctsm/regional_case.py new file mode 100644 index 0000000000..2ff633f23a --- /dev/null +++ b/python/ctsm/regional_case.py @@ -0,0 +1,100 @@ +from base_case import BaseCase + +import numpy as np +import xarray as xr + +class RegionalCase (BaseCase): + """ + A case to encapsulate regional cases. + """ + + def __init__(self, lat1, lat2, lon1, lon2, reg_name, + create_domain, create_surfdata, create_landuse, create_datm): + super().__init__(create_domain, create_surfdata, create_landuse, create_datm) + self.lat1 = lat1 + self.lat2 = lat2 + self.lon1 = lon1 + self.lon2 = lon2 + self.reg_name = reg_name + + def create_tag(self): + if self.reg_name: + self.tag = self.reg_name + else: + self.tag=str(self.lon1)+'-'+str(self.lon2)+'_'+str(self.lat1)+'-'+str(self.lat2) + + def create_domain_at_reg (self): + #print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print ("Creating domain file at region:", self.tag) + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') + lat = f2['lat'] + lon = f2['lon'] + # subset longitude and latitude arrays + xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3=f2.isel(nj=yind,ni=xind) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fdomain_in + + wfile=self.fdomain_out + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (fdomain_out)'+self.fdomain_out) + f2.close(); f3.close() + + + + def create_surfdata_at_reg(self): + #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print ("Creating surface dataset file at region:", self.tag) + # create 1d coordinate variables to enable sel() method + filename = self.fsurf_in + f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') + lat = f2['lat'] + lon = f2['lon'] + # subset longitude and latitude arrays + xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3=f2.isel(lsmlat=yind,lsmlon=xind) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fsurf_in + + # mode 'w' overwrites file + f3.to_netcdf(path=self.fsurf_out, mode='w') + print('created file (fsurf_out)'+self.fsurf_out) + #f1.close(); + f2.close(); f3.close() + + + def create_landuse_at_reg (self): + #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print ("Creating surface dataset file at region:",self.tag) + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') + lat = f2['lat'] + lon = f2['lon'] + # subset longitude and latitude arrays + xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3=f2.isel(lsmlat=yind,lsmlon=xind) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fluse_in + + wfile=self.fluse_out + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (fdomain_out)'+self.fdomain_out) + f2.close(); f3.close() + + + + + + 
diff --git a/python/ctsm/single_point_case.py b/python/ctsm/single_point_case.py new file mode 100644 index 0000000000..3d56ee7827 --- /dev/null +++ b/python/ctsm/single_point_case.py @@ -0,0 +1,244 @@ +from base_case import BaseCase +import os +import numpy as np +import xarray as xr +from datetime import date + +class SinglePointCase (BaseCase): + """ + A case to encapsulate single point cases. + + ... + + Attributes + ---------- + plat : float + latitude + plon : float + longitude + site_name: str -- default = None + Site name + + Methods + ------- + create_tag + create a tag for single point which is the site name + or the "lon-lat" format if the site name does not exist. + + create_domain_at_point + Create domain file at a single point. + create_landuse_at_point: + Create landuse file at a single point. + create_surfdata_at_point: + Create surface dataset at a single point. + create_datmdomain_at_point: + Create DATM domain file at a single point. + """ + + def __init__(self, plat, plon,site_name, + create_domain, create_surfdata, create_landuse, create_datm, + overwrite_single_pft, dominant_pft, zero_nonveg_landunits, + uniform_snowpack, no_saturation_excess): + super().__init__(create_domain, create_surfdata, create_landuse, create_datm) + self.plat = plat + self.plon = plon + self.site_name = site_name + self.overwrite_single_pft = overwrite_single_pft + self.dominant_pft = dominant_pft + self.zero_nonveg_landunits = zero_nonveg_landunits + self.uniform_snowpack = uniform_snowpack + self.no_saturation_excess = no_saturation_excess + + def create_tag(self): + if self.site_name: + self.tag = self.site_name + else: + self.tag=str(self.plon)+'_'+str(self.plat) + + @staticmethod + def create_fileout_name( filename,tag): + + basename = os.path.basename(filename) + items = basename.split('_') + today = date.today() + today_string = today.strftime("%y%m%d") + new_string = items[0]+"_"+items[2]+"_"+items[3]+"_"+ items[4] \ + +"_"+items[5]+"_"+items[6]+"_"+tag+"_c"+today_string+".nc" + return new_string + + def create_domain_at_point (self): + print( "----------------------------------------------------------------------") + print ("Creating domain file at ", self.plon, self.plat) + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') + # extract gridcell closest to plon/plat + f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['nj','ni']) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fdomain_in + + wfile=self.fdomain_out + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (fdomain_out)'+self.fdomain_out) + f2.close(); f3.close() + + + + def create_landuse_at_point (self): + print( "----------------------------------------------------------------------") + print ("Creating landuse file at ", self.plon, self.plat, ".") + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') + # extract gridcell closest to plon/plat + f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') + + # expand dimensions + f3 = f3.expand_dims(['lsmlat','lsmlon']) + # specify dimension order + #f3 = f3.transpose('time','lat','lon') + f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') + #f3['YEAR'] = f3['YEAR'].squeeze() + + # revert expand dimensions of YEAR + year = np.squeeze(np.asarray(f3['YEAR'])) + x = xr.DataArray(year, 
coords={'time':f3['time']}, dims='time', name='YEAR') + x.attrs['units']='unitless' + x.attrs['long_name']='Year of PFT data' + f3['YEAR'] = x + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fluse_in + + wfile = self.fluse_out + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (luse_out)'+self.fluse_out,".") + f2.close(); f3.close() + + + def create_surfdata_at_point(self): + print( "----------------------------------------------------------------------") + print ("Creating surface dataset file at ", self.plon, self.plat, ".") + # create 1d coordinate variables to enable sel() method + filename = self.fsurf_in + f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') + # extract gridcell closest to plon/plat + f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['lsmlat','lsmlon']).copy(deep=True) + + # modify surface data properties + if self.overwrite_single_pft: + f3['PCT_NAT_PFT'][:,:,:] = 0 + f3['PCT_NAT_PFT'][:,:,self.dominant_pft] = 100 + if self.zero_nonveg_landunits: + f3['PCT_NATVEG'][:,:] = 100 + f3['PCT_CROP'][:,:] = 0 + f3['PCT_LAKE'][:,:] = 0. + f3['PCT_WETLAND'][:,:] = 0. + f3['PCT_URBAN'][:,:,] = 0. + f3['PCT_GLACIER'][:,:] = 0. + if self.uniform_snowpack: + f3['STD_ELEV'][:,:] = 20. + if self.no_saturation_excess: + f3['FMAX'][:,:] = 0. + + # specify dimension order + #f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') + f3 = f3.transpose(u'time', u'cft', u'lsmpft', u'natpft', u'nglcec', u'nglcecp1', u'nlevsoi', u'nlevurb', u'numrad', u'numurbl', 'lsmlat', 'lsmlon') + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fsurf_in + del(f3.attrs['History_Log']) + # mode 'w' overwrites file + f3.to_netcdf(path=self.fsurf_out, mode='w') + print('Successfully created file (fsurf_out) :'+self.fsurf_out) + f2.close(); f3.close() + + def create_datmdomain_at_point(self): + print( "----------------------------------------------------------------------") + print("Creating DATM domain file at ", self.plon, self.plat, ".") + # create 1d coordinate variables to enable sel() method + filename = self.fdatmdomain_in + f2 = self.create_1d_coord(filename, 'xc','yc','ni','nj') + # extract gridcell closest to plon/plat + f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['nj','ni']) + wfile=self.fdatmdomain_out + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fdatmdomain_in + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (fdatmdomain_out) :'+self.fdatmdomain_out) + f2.close(); f3.close() + + def extract_datm_at(self, file_in, file_out): + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(file_in, 'LONGXY','LATIXY','lon','lat') + # extract gridcell closest to plon/plat + f3 = f2.sel(lon=self.plon,lat=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['lat','lon']) + # specify dimension order + f3 = f3.transpose(u'scalar','time','lat','lon') + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = file_in + # mode 'w' overwrites file + f3.to_netcdf(path=file_out, mode='w') + print('Successfully created file :'+ file_out) + f2.close(); f3.close() + + def create_datm_at_point(self): + print( "----------------------------------------------------------------------") + print("Creating 
DATM files at ", self.plon, self.plat, ".") + #-- specify subdirectory names and filename prefixes + solrdir = 'Solar/' + precdir = 'Precip/' + tpqwldir = 'TPHWL/' + prectag = 'clmforc.GSWP3.c2011.0.5x0.5.Prec.' + solrtag = 'clmforc.GSWP3.c2011.0.5x0.5.Solr.' + tpqwtag = 'clmforc.GSWP3.c2011.0.5x0.5.TPQWL.' + + #-- create data files + infile=[] + outfile=[] + for y in range(self.datm_syr,self.datm_eyr+1): + ystr=str(y) + for m in range(1,13): + mstr=str(m) + if m < 10: + mstr='0'+mstr + + dtag=ystr+'-'+mstr + + fsolar=self.dir_input_datm+solrdir+solrtag+dtag+'.nc' + fsolar2=self.dir_output_datm+solrtag+self.tag+'.'+dtag+'.nc' + fprecip=self.dir_input_datm+precdir+prectag+dtag+'.nc' + fprecip2=self.dir_output_datm+prectag+self.tag+'.'+dtag+'.nc' + ftpqw=self.dir_input_datm+tpqwldir+tpqwtag+dtag+'.nc' + ftpqw2=self.dir_output_datm+tpqwtag+self.tag+'.'+dtag+'.nc' + + infile+=[fsolar,fprecip,ftpqw] + outfile+=[fsolar2,fprecip2,ftpqw2] + + nm=len(infile) + for n in range(nm): + print(outfile[n]) + file_in = infile[n] + file_out = outfile[n] + self.extract_datm_at(file_in, file_out) + + + print('All DATM files are created in: '+self.dir_output_datm) + + + diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py new file mode 100755 index 0000000000..8b26e24f35 --- /dev/null +++ b/python/ctsm/subset_data.py @@ -0,0 +1,1146 @@ +#! /usr/bin/env python +""" +|------------------------------------------------------------------| +|--------------------- Instructions -----------------------------| +|------------------------------------------------------------------| + +Instructions for running on Cheyenne/Casper: + +load the following into your local environment + module load python + ncar_pylib + +------------------------------------------------------------------- +To see the available options for single point cases: + ./subset_data.py point --help + +To see the available options for regional cases: + ./subset_data.py reg --help +------------------------------------------------------------------- + +This script extracts domain files, surface dataset, and DATM files +at either a single point or a region using the global dataset. + +After creating a case using a global compset, run preview_namelist. +From the resulting lnd_in file in the run directory, find the name +of the domain file, and the surface data file. +From the datm streams files (e.g. datm.streams.txt.CLMGSWP3v1.Precip) +find the name of the datm forcing data domain file and forcing files. +Use these file names as the sources for the single point/regional +files to be created (see below). + +After running this script, point to the new CLM domain and surface +dataset using the user_nl_clm file in the case directory. In addition, +copy the datm.streams files to the case directory, with the prefix +'user_', e.g. user_datm.streams.txt.CLMGSWP3v1.Precip. Change the +information in the user_datm.streams* files to point to the single +point datm data (domain and forcing files) created using this script. + +The domain file is not set via user_nl_clm, but requires changing +LND_DOMAIN and ATM_DOMAIN (and their paths) in env_run.xml. + +Using single point forcing data requires specifying the nearest +neighbor mapping algorithm for the datm streams (usually they are +the first three in the list) in user_nl_datm: mapalgo = 'nn','nn','nn', +..., where the '...' can still be 'bilinear', etc, depending on the +other streams that are being used, e.g. aerosols, anomaly forcing, +bias correction. 
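+For example (an illustrative stream ordering), with the three GSWP3
+forcing streams listed first and two other streams after them,
+user_nl_datm would contain a line like:
+   mapalgo = 'nn','nn','nn','bilinear','bilinear'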
+ +The file env_mach_pes.xml should be modified to specify a single +processor. The mpi-serial libraries should also be used, and can be +set in env_build.xml by changing "MPILIB" to "mpi-serial" prior to +setting up the case. + +The case for the single point simulation should have river routing +and land ice models turned off (i.e. the compset should use stub +models SROF and SGLC) + +------------------------------------------------------------------- +To run the script for a single point: + ./subset_data.py point + +To run the script for a region: + ./subset_data.py reg + +To remove NPL from your environment on Cheyenne/Casper: + deactivate +------------------------------------------------------------------- + +""" +# TODO +# Automatic downloading of missing files if they are missing +# default 78 pft vs 16 pft + +# Import libraries +from __future__ import print_function + +import sys +import os +import string +import logging +import subprocess +import argparse + +import numpy as np +import xarray as xr + +from datetime import date +from getpass import getuser +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + +myname = getuser() + +def get_parser(): + """Get parser object for this script.""" + #parser = ArgumentParser(description=__doc__, + # formatter_class=ArgumentDefaultsHelpFormatter) + parser = ArgumentParser(description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + + parser.print_usage = parser.print_help + subparsers = parser.add_subparsers( + help='Two possible ways to run this sript, either:', + dest ='run_type') + pt_parser = subparsers.add_parser('point', + help = 'Run script for a single point.') + rg_parser = subparsers.add_parser('reg', + help = 'Run script for a region.') + + + pt_parser.add_argument('--lat', + help='Single point latitude. [default: %(default)s]', + action="store", + dest="plat", + required=False, + type = plat_type, + default=42.5) + pt_parser.add_argument('--lon', + help='Single point longitude. [default: %(default)s]', + action="store", + dest="plon", + required=False, + type = plon_type, + default= 287.8 ) + pt_parser.add_argument('--site', + help='Site name or tag. [default: %(default)s]', + action="store", + dest="site_name", + required = False, + type = str, + default = '') + pt_parser.add_argument('--create_domain', + help='Flag for creating CLM domain file at single point. [default: %(default)s]', + action="store", + dest="create_domain", + type = str2bool, + nargs = '?', + const = True, + required = False, + default = False) + pt_parser.add_argument('--create_surface', + help='Flag for creating surface data file at single point. [default: %(default)s]', + action="store", + dest="create_surfdata", + type = str2bool, + nargs = '?', + const = True, + required = False, + default = True) + pt_parser.add_argument('--create_landuse', + help='Flag for creating landuse data file at single point. [default: %(default)s]', + action="store", + dest="create_landuse", + type = str2bool, + nargs = '?', + const = True, + required = False, + default = False) + pt_parser.add_argument('--create_datm', + help='Flag for creating DATM forcing data at single point. [default: %(default)s]', + action="store", + dest="create_datm", + type = str2bool, + nargs = '?', + const = True, + required = False, + default = False) + pt_parser.add_argument('--datm_syr', + help='Start year for creating DATM forcing at single point. 
[default: %(default)s]', + action="store", + dest="datm_syr", + required = False, + type = int, + default = 1901) + pt_parser.add_argument('--datm_eyr', + help='End year for creating DATM forcing at single point. [default: %(default)s]', + action="store", + dest="datm_eyr", + required = False, + type = int, + default = 2014) + pt_parser.add_argument('--crop', + help='Create datasets using the extensive list of prognostic crop types. [default: %(default)s]', + action="store_true", + dest="crop_flag", + default=False) + pt_parser.add_argument('--dompft', + help='Dominant PFT type . [default: %(default)s] ', + action="store", + dest="dom_pft", + type =int, + default=7) + pt_parser.add_argument('--no-unisnow', + help='Turn off the flag for create uniform snowpack. [default: %(default)s]', + action="store_false", + dest="uni_snow", + default=True) + pt_parser.add_argument('--no-overwrite_single_pft', + help='Turn off the flag for making the whole grid 100%% single PFT. [default: %(default)s]', + action="store_false", + dest="overwrite_single_pft", + default=True) + pt_parser.add_argument('--zero_nonveg', + help='Set all non-vegetation landunits to zero. [default: %(default)s]', + action="store", + dest="zero_nonveg", + type =bool, + default=True) + pt_parser.add_argument('--no_saturation_excess', + help='Turn off the flag for saturation excess. [default: %(default)s]', + action="store", + dest="no_saturation_excess", + type =bool, + default=True) + pt_parser.add_argument('--outdir', + help='Output directory. [default: %(default)s]', + action="store", + dest="out_dir", + type =str, + default="/glade/scratch/"+myname+"/single_point/") + + rg_parser.add_argument('--lat1', + help='Region start latitude. [default: %(default)s]', + action="store", + dest="lat1", + required=False, + type = plat_type, + default=-40) + rg_parser.add_argument('--lat2', + help='Region end latitude. [default: %(default)s]', + action="store", + dest="lat2", + required=False, + type = plat_type, + default=15) + rg_parser.add_argument('--lon1', + help='Region start longitude. [default: %(default)s]', + action="store", + dest="lon1", + required=False, + type = plon_type, + default= 275. ) + rg_parser.add_argument('--lon2', + help='Region end longitude. [default: %(default)s]', + action="store", + dest="lon2", + required=False, + type = plon_type, + default= 330. ) + rg_parser.add_argument('--reg', + help='Region name or tag. [default: %(default)s]', + action="store", + dest="reg_name", + required = False, + type = str, + default = '') + rg_parser.add_argument('--create_domain', + help='Flag for creating CLM domain file for a region. [default: %(default)s]', + action="store", + dest="create_domain", + type = str2bool, + nargs = '?', + const = True, + required = False, + default = False) + rg_parser.add_argument('--create_surface', + help='Flag for creating surface data file for a region. [default: %(default)s]', + action="store", + dest="create_surfdata", + type = str2bool, + nargs = '?', + const = True, + required = False, + default = True) + rg_parser.add_argument('--create_landuse', + help='Flag for creating landuse data file for a region. [default: %(default)s]', + action="store", + dest="create_landuse", + type = str2bool, + nargs = '?', + const = True, + required = False, + default = False) + rg_parser.add_argument('--create_datm', + help='Flag for creating DATM forcing data for a region. 
[default: %(default)s]',
+                        action="store",
+                        dest="create_datm",
+                        type=str2bool,
+                        nargs='?',
+                        const=True,
+                        required=False,
+                        default=False)
+    rg_parser.add_argument('--datm_syr',
+                        help='Start year for creating DATM forcing for a region. [default: %(default)s]',
+                        action="store",
+                        dest="datm_syr",
+                        required=False,
+                        type=int,
+                        default=1901)
+    rg_parser.add_argument('--datm_eyr',
+                        help='End year for creating DATM forcing for a region. [default: %(default)s]',
+                        action="store",
+                        dest="datm_eyr",
+                        required=False,
+                        type=int,
+                        default=2014)
+    rg_parser.add_argument('--crop',
+                        help='Create datasets using the extensive list of prognostic crop types. [default: %(default)s]',
+                        action="store_true",
+                        dest="crop_flag",
+                        default=False)
+    rg_parser.add_argument('--dompft',
+                        help='Dominant PFT type. [default: %(default)s]',
+                        action="store",
+                        dest="dom_pft",
+                        type=int,
+                        default=7)
+    rg_parser.add_argument('--outdir',
+                        help='Output directory. [default: %(default)s]',
+                        action="store",
+                        dest="out_dir",
+                        type=str,
+                        default="/glade/scratch/"+myname+"/regional/")
+
+    return parser
+
+def str2bool(v):
+    """
+    Function for converting different forms of
+    command line boolean strings to boolean value.
+
+    Args:
+        v (str): String bool input
+
+    Raises:
+        argparse.ArgumentTypeError: If the argument is not an acceptable
+        boolean string (such as yes or no; true or false; y or n;
+        t or f; 0 or 1).
+
+    Returns:
+        bool: Boolean value corresponding to the input.
+    """
+    if isinstance(v, bool):
+        return v
+    if v.lower() in ('yes', 'true', 't', 'y', '1'):
+        return True
+    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
+        return False
+    else:
+        raise argparse.ArgumentTypeError('Boolean value expected. [true or false] or [y or n]')
+
+
+def plat_type(x):
+    """
+    Function to define lat type for the parser and
+    raise an error if latitude is not between -90 and 90.
+    """
+    x = float(x)
+    if (x < -90) or (x > 90):
+        raise argparse.ArgumentTypeError("ERROR: Latitude should be between -90 and 90.")
+    return x
+
+
+def plon_type(x):
+    """
+    Function to define lon type for the parser,
+    convert negative longitudes to the 0-360 range, and
+    raise an error if lon is not between -180 and 360.
+    """
+    x = float(x)
+    if (-180 < x) and (x < 0):
+        print("lon is :", x)
+        x = x % 360
+        print("after modulo lon is :", x)
+    if (x < 0) or (x > 360):
+        raise argparse.ArgumentTypeError("ERROR: Longitude of single point should be between 0 and 360 or -180 and 180.")
+    return x
+
+def get_git_sha():
+    """
+    Returns Git short SHA for the current directory.
+    """
+    try:
+        sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode()
+    except subprocess.CalledProcessError:
+        sha = "NOT-A-GIT-REPOSITORY"
+    return sha
+
+class BaseCase :
+    """
+    Parent class to SinglePointCase and RegionalCase
+
+    ...
+ + Attributes + ---------- + create_domain : bool + flag for creating domain file + create_surfdata : bool + flag for creating surface dataset + create_landuse : bool + flag for creating landuse file + create_datm : bool + flag for creating DATM files + + Methods + ------- + create_1d_coord(filename, lon_varname , lat_varname,x_dim , y_dim ) + create 1d coordinate variables to enable sel() method + + add_tag_to_filename(filename, tag) + add a tag and timetag to a filename ending with + [._]cYYMMDD.nc or [._]YYMMDD.nc + """ + def __init__(self, create_domain, create_surfdata, create_landuse, create_datm): + self.create_domain = create_domain + self.create_surfdata = create_surfdata + self.create_landuse = create_landuse + self.create_datm = create_datm + + def __str__(self): + return str(self.__class__) + '\n' + '\n'.join((str(item) + ' = ' + str(self.__dict__[item]) + for item in sorted(self.__dict__))) + + @staticmethod + def create_1d_coord(filename, lon_varname , lat_varname , x_dim , y_dim): + """ + lon_varname : variable name that has 2d lon + lat_varname : variable name that has 2d lat + x_dim: dimension name in X -- lon + y_dim: dimension name in Y -- lat + """ + print( "Open file: "+filename ) + f1 = xr.open_dataset(filename) + + # create 1d coordinate variables to enable sel() method + lon0 = np.asarray(f1[lon_varname][0,:]) + lat0 = np.asarray(f1[lat_varname][:,0]) + lon = xr.DataArray(lon0,name='lon',dims=x_dim,coords={x_dim:lon0}) + lat = xr.DataArray(lat0,name='lat',dims=y_dim,coords={y_dim:lat0}) + + f2=f1.assign({'lon':lon,'lat':lat}) + + f2.reset_coords([lon_varname,lat_varname]) + f1.close() + return f2 + + @staticmethod + def add_tag_to_filename(filename, tag): + """ + Add a tag and replace timetag of a filename + # Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc + # Add the tag to just before that ending part + # and change the ending part to the current time tag + """ + basename = os.path.basename(filename) + cend = -10 + if ( basename[cend] == "c" ): + cend = cend - 1 + if ( (basename[cend] != ".") and (basename[cend] != "_") ): + print ( "Trouble figuring out where to add tag to filename:"+filename ) + os.abort() + today = date.today() + today_string = today.strftime("%y%m%d") + return( basename[:cend]+"_"+tag+"_c"+today_string +'.nc') + + @staticmethod + def update_metadata(nc): + #update attributes + today = date.today() + today_string = today.strftime("%Y-%m-%d") + + #get git hash + sha = get_git_sha() + + nc.attrs['Created_on'] = today_string + nc.attrs['Created_by'] = myname + nc.attrs['Created_with'] = os.path.abspath(__file__) + " -- "+sha + + #delete unrelated attributes if they exist + del_attrs = ['source_code', 'SVN_url', 'hostname', 'history' + 'History_Log', 'Logname', 'Host', 'Version', + 'Compiler_Optimized'] + attr_list = nc.attrs + + for attr in del_attrs: + if attr in attr_list: + #print ("This attr should be deleted:", attr) + del(nc.attrs[attr]) + + + #for attr, value in attr_list.items(): + # print (attr + " = "+str(value)) + + + +class SinglePointCase (BaseCase): + """ + A case to encapsulate single point cases. + + ... + + Attributes + ---------- + plat : float + latitude + plon : float + longitude + site_name: str -- default = None + Site name + + Methods + ------- + create_tag + create a tag for single point which is the site name + or the "lon-lat" format if the site name does not exist. + + create_domain_at_point + Create domain file at a single point. + create_landuse_at_point: + Create landuse file at a single point. 
+ create_surfdata_at_point: + Create surface dataset at a single point. + create_datmdomain_at_point: + Create DATM domain file at a single point. + """ + + def __init__(self, plat, plon,site_name, + create_domain, create_surfdata, create_landuse, create_datm, + overwrite_single_pft, dominant_pft, zero_nonveg_landunits, + uniform_snowpack, no_saturation_excess): + super().__init__(create_domain, create_surfdata, create_landuse, create_datm) + self.plat = plat + self.plon = plon + self.site_name = site_name + self.overwrite_single_pft = overwrite_single_pft + self.dominant_pft = dominant_pft + self.zero_nonveg_landunits = zero_nonveg_landunits + self.uniform_snowpack = uniform_snowpack + self.no_saturation_excess = no_saturation_excess + + def create_tag(self): + if self.site_name: + self.tag = self.site_name + else: + self.tag=str(self.plon)+'_'+str(self.plat) + + @staticmethod + def create_fileout_name( filename,tag): + + basename = os.path.basename(filename) + items = basename.split('_') + today = date.today() + today_string = today.strftime("%y%m%d") + new_string = items[0]+"_"+items[2]+"_"+items[3]+"_"+ items[4] \ + +"_"+items[5]+"_"+items[6]+"_"+tag+"_c"+today_string+".nc" + return new_string + + def create_domain_at_point (self): + print( "----------------------------------------------------------------------") + print ("Creating domain file at ", self.plon, self.plat) + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') + # extract gridcell closest to plon/plat + f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['nj','ni']) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fdomain_in + + wfile=self.fdomain_out + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (fdomain_out)'+self.fdomain_out) + f2.close(); f3.close() + + + def create_landuse_at_point (self): + print( "----------------------------------------------------------------------") + print ("Creating landuse file at ", self.plon, self.plat, ".") + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') + # extract gridcell closest to plon/plat + f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') + + # expand dimensions + f3 = f3.expand_dims(['lsmlat','lsmlon']) + # specify dimension order + #f3 = f3.transpose('time','lat','lon') + f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') + #f3['YEAR'] = f3['YEAR'].squeeze() + + # revert expand dimensions of YEAR + year = np.squeeze(np.asarray(f3['YEAR'])) + x = xr.DataArray(year, coords={'time':f3['time']}, dims='time', name='YEAR') + x.attrs['units']='unitless' + x.attrs['long_name']='Year of PFT data' + f3['YEAR'] = x + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fluse_in + + wfile = self.fluse_out + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (luse_out)'+self.fluse_out,".") + f2.close(); f3.close() + + def create_surfdata_at_point(self): + print( "----------------------------------------------------------------------") + print ("Creating surface dataset file at ", self.plon, self.plat, ".") + # create 1d coordinate variables to enable sel() method + filename = self.fsurf_in + f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') + # extract gridcell closest to plon/plat + f3 = 
f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['lsmlat','lsmlon']).copy(deep=True) + + # modify surface data properties + if self.overwrite_single_pft: + f3['PCT_NAT_PFT'][:,:,:] = 0 + f3['PCT_NAT_PFT'][:,:,self.dominant_pft] = 100 + if self.zero_nonveg_landunits: + f3['PCT_NATVEG'][:,:] = 100 + f3['PCT_CROP'][:,:] = 0 + f3['PCT_LAKE'][:,:] = 0. + f3['PCT_WETLAND'][:,:] = 0. + f3['PCT_URBAN'][:,:,] = 0. + f3['PCT_GLACIER'][:,:] = 0. + if self.uniform_snowpack: + f3['STD_ELEV'][:,:] = 20. + if self.no_saturation_excess: + f3['FMAX'][:,:] = 0. + + # specify dimension order + #f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') + f3 = f3.transpose(u'time', u'cft', u'lsmpft', u'natpft', u'nglcec', u'nglcecp1', u'nlevsoi', u'nlevurb', u'numrad', u'numurbl', 'lsmlat', 'lsmlon') + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fsurf_in + del(f3.attrs['History_Log']) + # mode 'w' overwrites file + f3.to_netcdf(path=self.fsurf_out, mode='w') + print('Successfully created file (fsurf_out) :'+self.fsurf_out) + f2.close(); f3.close() + + def create_datmdomain_at_point(self): + print( "----------------------------------------------------------------------") + print("Creating DATM domain file at ", self.plon, self.plat, ".") + # create 1d coordinate variables to enable sel() method + filename = self.fdatmdomain_in + f2 = self.create_1d_coord(filename, 'xc','yc','ni','nj') + # extract gridcell closest to plon/plat + f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['nj','ni']) + wfile=self.fdatmdomain_out + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fdatmdomain_in + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (fdatmdomain_out) :'+self.fdatmdomain_out) + f2.close(); f3.close() + + def extract_datm_at(self, file_in, file_out): + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(file_in, 'LONGXY','LATIXY','lon','lat') + # extract gridcell closest to plon/plat + f3 = f2.sel(lon=self.plon,lat=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['lat','lon']) + # specify dimension order + f3 = f3.transpose(u'scalar','time','lat','lon') + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = file_in + # mode 'w' overwrites file + f3.to_netcdf(path=file_out, mode='w') + print('Successfully created file :'+ file_out) + f2.close(); f3.close() + + def create_datm_at_point(self): + print( "----------------------------------------------------------------------") + print("Creating DATM files at ", self.plon, self.plat, ".") + #-- specify subdirectory names and filename prefixes + solrdir = 'Solar/' + precdir = 'Precip/' + tpqwldir = 'TPHWL/' + prectag = 'clmforc.GSWP3.c2011.0.5x0.5.Prec.' + solrtag = 'clmforc.GSWP3.c2011.0.5x0.5.Solr.' + tpqwtag = 'clmforc.GSWP3.c2011.0.5x0.5.TPQWL.' 
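+        #-- illustrative example of the naming scheme (values hypothetical):
+        #-- for y=2000, m=3 and tag='287.8_42.5', the loop below reads
+        #--   <dir_input_datm>Solar/clmforc.GSWP3.c2011.0.5x0.5.Solr.2000-03.nc
+        #-- and writes
+        #--   <dir_output_datm>clmforc.GSWP3.c2011.0.5x0.5.Solr.287.8_42.5.2000-03.nc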
+ + #-- create data files + infile=[] + outfile=[] + for y in range(self.datm_syr,self.datm_eyr+1): + ystr=str(y) + for m in range(1,13): + mstr=str(m) + if m < 10: + mstr='0'+mstr + + dtag=ystr+'-'+mstr + + fsolar=self.dir_input_datm+solrdir+solrtag+dtag+'.nc' + fsolar2=self.dir_output_datm+solrtag+self.tag+'.'+dtag+'.nc' + fprecip=self.dir_input_datm+precdir+prectag+dtag+'.nc' + fprecip2=self.dir_output_datm+prectag+self.tag+'.'+dtag+'.nc' + ftpqw=self.dir_input_datm+tpqwldir+tpqwtag+dtag+'.nc' + ftpqw2=self.dir_output_datm+tpqwtag+self.tag+'.'+dtag+'.nc' + + infile+=[fsolar,fprecip,ftpqw] + outfile+=[fsolar2,fprecip2,ftpqw2] + + nm=len(infile) + for n in range(nm): + print(outfile[n]) + file_in = infile[n] + file_out = outfile[n] + self.extract_datm_at(file_in, file_out) + + + print('All DATM files are created in: '+self.dir_output_datm) + +class RegionalCase (BaseCase): + """ + A case to encapsulate regional cases. + """ + + def __init__(self, lat1, lat2, lon1, lon2, reg_name, + create_domain, create_surfdata, create_landuse, create_datm): + super().__init__(create_domain, create_surfdata, create_landuse, create_datm) + self.lat1 = lat1 + self.lat2 = lat2 + self.lon1 = lon1 + self.lon2 = lon2 + self.reg_name = reg_name + + def create_tag(self): + if self.reg_name: + self.tag = self.reg_name + else: + self.tag=str(self.lon1)+'-'+str(self.lon2)+'_'+str(self.lat1)+'-'+str(self.lat2) + + def create_domain_at_reg (self): + #print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print ("Creating domain file at region:", self.tag) + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') + lat = f2['lat'] + lon = f2['lon'] + # subset longitude and latitude arrays + xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3=f2.isel(nj=yind,ni=xind) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fdomain_in + + wfile=self.fdomain_out + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (fdomain_out)'+self.fdomain_out) + f2.close(); f3.close() + + + def create_surfdata_at_reg(self): + #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print ("Creating surface dataset file at region:", self.tag) + # create 1d coordinate variables to enable sel() method + filename = self.fsurf_in + f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') + lat = f2['lat'] + lon = f2['lon'] + # subset longitude and latitude arrays + xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3=f2.isel(lsmlat=yind,lsmlon=xind) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fsurf_in + + # mode 'w' overwrites file + f3.to_netcdf(path=self.fsurf_out, mode='w') + print('created file (fsurf_out)'+self.fsurf_out) + #f1.close(); + f2.close(); f3.close() + + + def create_landuse_at_reg (self): + #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print ("Creating surface dataset file at region:",self.tag) + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') + lat = f2['lat'] + lon = f2['lon'] + # subset longitude and latitude arrays + xind=np.where((lon >= self.lon1) & (lon <= 
self.lon2))[0]
+        yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0]
+        f3 = f2.isel(lsmlat=yind, lsmlon=xind)
+
+        #update attributes
+        self.update_metadata(f3)
+        f3.attrs['Created_from'] = self.fluse_in
+
+        wfile = self.fluse_out
+        # mode 'w' overwrites file
+        f3.to_netcdf(path=wfile, mode='w')
+        print('Successfully created file (fluse_out): ' + self.fluse_out)
+        f2.close(); f3.close()
+
+
+def setup_logging(log_file, log_level):
+    """
+    Setup logging to log to console and log file.
+    """
+    # logging.handlers is a submodule that "import logging" does not
+    # pull in, so import it explicitly before using RotatingFileHandler
+    import logging.handlers
+
+    root_logger = logging.getLogger()
+    root_logger.setLevel(log_level)
+
+    # setup log file
+    one_mb = 1000000
+    handler = logging.handlers.RotatingFileHandler(log_file, maxBytes=one_mb, backupCount=10)
+
+    fmt = logging.Formatter(
+        '%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
+        datefmt='%y-%m-%d %H:%M:%S')
+
+    handler.setFormatter(fmt)
+    root_logger.addHandler(handler)
+
+    # setup logging to console
+    stream_handler = logging.StreamHandler(sys.stdout)
+    stream_handler.setFormatter(fmt)
+    root_logger.addHandler(stream_handler)
+
+    # redirect stdout/err to log file
+    StreamToLogger.setup_stdout()
+    StreamToLogger.setup_stderr()
+
+
+class StreamToLogger(object):
+    """
+    Custom class to log all stdout and stderr streams.
+    modified from:
+    https://www.electricmonk.nl/log/2011/08/14/redirect-stdout-and-stderr-to-a-logger-in-python/
+    """
+    def __init__(self, stream, logger, log_level=logging.INFO,
+                 also_log_to_stream=False):
+        self.logger = logger
+        self.stream = stream
+        self.log_level = log_level
+        self.linebuf = ''
+        self.also_log_to_stream = also_log_to_stream
+
+    @classmethod
+    def setup_stdout(cls, also_log_to_stream=True):
+        """
+        Setup logger for stdout
+        """
+        stdout_logger = logging.getLogger('STDOUT')
+        sl = StreamToLogger(sys.stdout, stdout_logger, logging.INFO, also_log_to_stream)
+        sys.stdout = sl
+
+    @classmethod
+    def setup_stderr(cls, also_log_to_stream=True):
+        """
+        Setup logger for stderr
+        """
+        stderr_logger = logging.getLogger('STDERR')
+        sl = StreamToLogger(sys.stderr, stderr_logger, logging.ERROR, also_log_to_stream)
+        sys.stderr = sl
+
+    def write(self, buf):
+        temp_linebuf = self.linebuf + buf
+        self.linebuf = ''
+        for line in temp_linebuf.splitlines(True):
+            if line[-1] == '\n':
+                self.logger.log(self.log_level, line.rstrip())
+            else:
+                self.linebuf += line
+
+    def flush(self):
+        if self.linebuf != '':
+            self.logger.log(self.log_level, self.linebuf.rstrip())
+            self.linebuf = ''
+
+
+def main():
+
+    args = get_parser().parse_args()
+
+    # --------------------------------- #
+
+    today = date.today()
+    today_string = today.strftime("%Y%m%d")
+
+    pwd = os.getcwd()
+
+    log_file = os.path.join(pwd, today_string + '.log')
+
+    log_level = logging.DEBUG
+    setup_logging(log_file, log_level)
+    log = logging.getLogger(__name__)
+
+    print("User = " + myname)
+    print("Current directory = " + pwd)
+
+    # --------------------------------- #
+
+    if (args.run_type == "point"):
+        print("----------------------------------------------------------------------------")
+        print("This script extracts a single point from the global CTSM inputdata datasets."
) + + #-- Specify point to extract + plon = args.plon + plat = args.plat + + #-- Create regional CLM domain file + create_domain = args.create_domain + #-- Create CLM surface data file + create_surfdata = args.create_surfdata + #-- Create CLM surface data file + create_landuse = args.create_landuse + #-- Create single point DATM atmospheric forcing data + create_datm = args.create_datm + datm_syr = args.datm_syr + datm_eyr = args.datm_eyr + + crop_flag = args.crop_flag + + site_name = args.site_name + + #-- Modify landunit structure + overwrite_single_pft = args.overwrite_single_pft + dominant_pft = args.dom_pft + zero_nonveg_landunits= args.zero_nonveg + uniform_snowpack = args.uni_snow + no_saturation_excess = args.no_saturation_excess + + + #-- Create SinglePoint Object + single_point = SinglePointCase(plat, plon,site_name, + create_domain, create_surfdata, create_landuse, create_datm, + overwrite_single_pft, dominant_pft, zero_nonveg_landunits, uniform_snowpack, + no_saturation_excess) + single_point.create_tag() + + + print (single_point) + + if crop_flag: + num_pft = "78" + else: + num_pft = "16" + + print('crop_flag = '+ crop_flag.__str__()+ ' => num_pft ='+ num_pft) + + #-- Set input and output filenames + #-- Specify input and output directories + dir_output = args.out_dir + if ( not os.path.isdir( dir_output ) ): + os.mkdir( dir_output ) + + dir_inputdata='/glade/p/cesmdata/cseg/inputdata/' + dir_clm_forcedata='/glade/p/cgd/tss/CTSM_datm_forcing_data/' + dir_input_datm=os.path.join(dir_clm_forcedata,'atm_forcing.datm7.GSWP3.0.5d.v1.c170516/') + dir_output_datm=os.path.join(dir_output , 'datmdata/') + if ( not os.path.isdir( dir_output_datm ) ): + os.mkdir( dir_output_datm ) + + print ("dir_input_datm : ", dir_input_datm) # + print ("dir_output_datm : ", dir_output_datm) # + + + #-- Set time stamp + today = date.today() + timetag = today.strftime("%y%m%d") + + #-- Specify land domain file --------------------------------- + fdomain_in = os.path.join(dir_inputdata,'share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc') + fdomain_out = dir_output + single_point.add_tag_to_filename( fdomain_in, single_point.tag ) + single_point.fdomain_in = fdomain_in + single_point.fdomain_out = fdomain_out + print ("fdomain_in :",fdomain_in) # + print ("fdomain_out :",fdomain_out) # + + #-- Specify surface data file -------------------------------- + if crop_flag: + fsurf_in = os.path.join (dir_inputdata, 'lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc') + else: + fsurf_in = os.path.join (dir_inputdata, 'lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc') + + #fsurf_out = dir_output + single_point.add_tag_to_filename(fsurf_in, single_point.tag) # remove res from filename for singlept + fsurf_out = dir_output + single_point.create_fileout_name(fsurf_in, single_point.tag) + single_point.fsurf_in = fsurf_in + single_point.fsurf_out = fsurf_out + print ("fsurf_in :",fsurf_in) # + print ("fsurf_out :",fsurf_out) # + + #-- Specify landuse file ------------------------------------- + if crop_flag: + fluse_in = os.path.join (dir_inputdata,'lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc') + else: + fluse_in = os.path.join (dir_inputdata,'lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc') + #fluse_out = dir_output + single_point.add_tag_to_filename( fluse_in, single_point.tag 
) # remove resolution from filename for singlept cases + fluse_out = dir_output + single_point.create_fileout_name(fluse_in, single_point.tag) + single_point.fluse_in = fluse_in + single_point.fluse_out = fluse_out + print ("fluse_in :", fluse_in) # + print ("fluse_out :", fluse_out) # + + #-- Specify datm domain file --------------------------------- + fdatmdomain_in = os.path.join (dir_clm_forcedata,'atm_forcing.datm7.GSWP3.0.5d.v1.c170516/domain.lnd.360x720_gswp3.0v1.c170606.nc') + fdatmdomain_out = dir_output_datm+single_point.add_tag_to_filename( fdatmdomain_in, single_point.tag ) + single_point.fdatmdomain_in = fdatmdomain_in + single_point.fdatmdomain_out = fdatmdomain_out + print ("fdatmdomain_in : ", fdatmdomain_in) # + print ("fdatmdomain out : ", fdatmdomain_out) # + + #-- Create CTSM domain file + if create_domain: + single_point.create_domain_at_point() + + #-- Create CTSM surface data file + if create_surfdata: + single_point.create_surfdata_at_point() + + #-- Create CTSM transient landuse data file + if create_landuse: + single_point.create_landuse_at_point() + + #-- Create single point atmospheric forcing data + if create_datm: + single_point.create_datmdomain_at_point() + single_point.datm_syr =datm_syr + single_point.datm_eyr =datm_eyr + single_point.dir_input_datm = dir_input_datm + single_point.dir_output_datm = dir_output_datm + single_point.create_datm_at_point() + + print( "Successfully ran script for single point." ) + exit() + + elif (args.run_type == "reg"): + print ("Running the script for the region") + #-- Specify region to extract + lat1 = args.lat1 + lat2 = args.lat2 + + lon1 = args.lon1 + lon2 = args.lon2 + + #-- Create regional CLM domain file + create_domain = args.create_domain + #-- Create CLM surface data file + create_surfdata = args.create_surfdata + #-- Create CLM surface data file + create_landuse = args.create_landuse + #-- Create DATM atmospheric forcing data + create_datm = args.create_datm + + crop_flag = args.crop_flag + + reg_name = args.reg_name + + region = RegionalCase(lat1, lat2, lon1, lon2, reg_name, create_domain, create_surfdata, create_landuse, create_datm) + + print (region) + + if crop_flag: + num_pft = "78" + else: + num_pft = "16" + + + print(' crop_flag = '+ crop_flag.__str__()+ ' num_pft ='+ num_pft) + + + region.create_tag() + + #-- Set input and output filenames + #-- Specify input and output directories + dir_output='/glade/scratch/'+myname+'/region/' + if ( not os.path.isdir( dir_output ) ): + os.mkdir( dir_output ) + + dir_inputdata='/glade/p/cesmdata/cseg/inputdata/' + dir_clm_forcedata='/glade/p/cgd/tss/CTSM_datm_forcing_data/' + + #-- Set time stamp + command='date "+%y%m%d"' + x2=subprocess.Popen(command,stdout=subprocess.PIPE,shell='True') + x=x2.communicate() + timetag = x[0].strip() + print (timetag) + + #-- Specify land domain file --------------------------------- + fdomain_in = dir_inputdata+'share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc' + fdomain_out = dir_output + 'domain.lnd.fv1.9x2.5_gx1v7.'+region.tag+'_170518.nc' + #SinglePointCase.set_fdomain (fdomain) + region.fdomain_in = fdomain_in + region.fdomain_out = fdomain_out + + #-- Specify surface data file -------------------------------- + fsurf_in = dir_inputdata+'lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc' + fsurf_out = dir_output + 'surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_'+region.tag+'_c170824.nc' + region.fsurf_in = fsurf_in + region.fsurf_out = fsurf_out + + #-- Specify landuse file ------------------------------------- 
+ fluse_in = dir_inputdata+'lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc' + fluse_out = dir_output + 'landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_'+region.tag+'.c170824.nc' + region.fluse_in = fluse_in + region.fluse_out = fluse_out + + #-- Create CTSM domain file + if create_domain: + region.create_domain_at_reg() + + #-- Create CTSM surface data file + if create_surfdata: + region.create_surfdata_at_reg() + + #-- Create CTSM transient landuse data file + if create_landuse: + region.create_landuse_at_reg() + print( "Successfully ran script for a regional case." ) + + else : + # print help when no option is chosen + get_parser().print_help() + +if __name__ == "__main__": + main() From 89497d311d3a717b1872b4a635a2590b677bc023 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 17 Aug 2021 23:10:36 -0600 Subject: [PATCH 003/223] updating subset_data.py remove class definition from here. --- python/ctsm/subset_data.py | 437 +------------------------------------ 1 file changed, 5 insertions(+), 432 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 8b26e24f35..d0d2726d07 100755 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -1,4 +1,4 @@ -#! /usr/bin/env python +#!/usr/bin/env python3 """ |------------------------------------------------------------------| |--------------------- Instructions -----------------------------| @@ -88,6 +88,10 @@ from getpass import getuser from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter +from base_case import BaseCase +from single_point_case import SinglePointCase +from regional_case import RegionalCase + myname = getuser() def get_parser(): @@ -386,437 +390,6 @@ def get_git_sha(): sha = "NOT-A-GIT-REPOSITORY" return sha -class BaseCase : - """ - Parent class to SinglePointCase and RegionalCase - - ... 
- - Attributes - ---------- - create_domain : bool - flag for creating domain file - create_surfdata : bool - flag for creating surface dataset - create_landuse : bool - flag for creating landuse file - create_datm : bool - flag for creating DATM files - - Methods - ------- - create_1d_coord(filename, lon_varname , lat_varname,x_dim , y_dim ) - create 1d coordinate variables to enable sel() method - - add_tag_to_filename(filename, tag) - add a tag and timetag to a filename ending with - [._]cYYMMDD.nc or [._]YYMMDD.nc - """ - def __init__(self, create_domain, create_surfdata, create_landuse, create_datm): - self.create_domain = create_domain - self.create_surfdata = create_surfdata - self.create_landuse = create_landuse - self.create_datm = create_datm - - def __str__(self): - return str(self.__class__) + '\n' + '\n'.join((str(item) + ' = ' + str(self.__dict__[item]) - for item in sorted(self.__dict__))) - - @staticmethod - def create_1d_coord(filename, lon_varname , lat_varname , x_dim , y_dim): - """ - lon_varname : variable name that has 2d lon - lat_varname : variable name that has 2d lat - x_dim: dimension name in X -- lon - y_dim: dimension name in Y -- lat - """ - print( "Open file: "+filename ) - f1 = xr.open_dataset(filename) - - # create 1d coordinate variables to enable sel() method - lon0 = np.asarray(f1[lon_varname][0,:]) - lat0 = np.asarray(f1[lat_varname][:,0]) - lon = xr.DataArray(lon0,name='lon',dims=x_dim,coords={x_dim:lon0}) - lat = xr.DataArray(lat0,name='lat',dims=y_dim,coords={y_dim:lat0}) - - f2=f1.assign({'lon':lon,'lat':lat}) - - f2.reset_coords([lon_varname,lat_varname]) - f1.close() - return f2 - - @staticmethod - def add_tag_to_filename(filename, tag): - """ - Add a tag and replace timetag of a filename - # Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc - # Add the tag to just before that ending part - # and change the ending part to the current time tag - """ - basename = os.path.basename(filename) - cend = -10 - if ( basename[cend] == "c" ): - cend = cend - 1 - if ( (basename[cend] != ".") and (basename[cend] != "_") ): - print ( "Trouble figuring out where to add tag to filename:"+filename ) - os.abort() - today = date.today() - today_string = today.strftime("%y%m%d") - return( basename[:cend]+"_"+tag+"_c"+today_string +'.nc') - - @staticmethod - def update_metadata(nc): - #update attributes - today = date.today() - today_string = today.strftime("%Y-%m-%d") - - #get git hash - sha = get_git_sha() - - nc.attrs['Created_on'] = today_string - nc.attrs['Created_by'] = myname - nc.attrs['Created_with'] = os.path.abspath(__file__) + " -- "+sha - - #delete unrelated attributes if they exist - del_attrs = ['source_code', 'SVN_url', 'hostname', 'history' - 'History_Log', 'Logname', 'Host', 'Version', - 'Compiler_Optimized'] - attr_list = nc.attrs - - for attr in del_attrs: - if attr in attr_list: - #print ("This attr should be deleted:", attr) - del(nc.attrs[attr]) - - - #for attr, value in attr_list.items(): - # print (attr + " = "+str(value)) - - - -class SinglePointCase (BaseCase): - """ - A case to encapsulate single point cases. - - ... - - Attributes - ---------- - plat : float - latitude - plon : float - longitude - site_name: str -- default = None - Site name - - Methods - ------- - create_tag - create a tag for single point which is the site name - or the "lon-lat" format if the site name does not exist. - - create_domain_at_point - Create domain file at a single point. - create_landuse_at_point: - Create landuse file at a single point. 
- create_surfdata_at_point: - Create surface dataset at a single point. - create_datmdomain_at_point: - Create DATM domain file at a single point. - """ - - def __init__(self, plat, plon,site_name, - create_domain, create_surfdata, create_landuse, create_datm, - overwrite_single_pft, dominant_pft, zero_nonveg_landunits, - uniform_snowpack, no_saturation_excess): - super().__init__(create_domain, create_surfdata, create_landuse, create_datm) - self.plat = plat - self.plon = plon - self.site_name = site_name - self.overwrite_single_pft = overwrite_single_pft - self.dominant_pft = dominant_pft - self.zero_nonveg_landunits = zero_nonveg_landunits - self.uniform_snowpack = uniform_snowpack - self.no_saturation_excess = no_saturation_excess - - def create_tag(self): - if self.site_name: - self.tag = self.site_name - else: - self.tag=str(self.plon)+'_'+str(self.plat) - - @staticmethod - def create_fileout_name( filename,tag): - - basename = os.path.basename(filename) - items = basename.split('_') - today = date.today() - today_string = today.strftime("%y%m%d") - new_string = items[0]+"_"+items[2]+"_"+items[3]+"_"+ items[4] \ - +"_"+items[5]+"_"+items[6]+"_"+tag+"_c"+today_string+".nc" - return new_string - - def create_domain_at_point (self): - print( "----------------------------------------------------------------------") - print ("Creating domain file at ", self.plon, self.plat) - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') - # extract gridcell closest to plon/plat - f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['nj','ni']) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdomain_in - - wfile=self.fdomain_out - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - - - def create_landuse_at_point (self): - print( "----------------------------------------------------------------------") - print ("Creating landuse file at ", self.plon, self.plat, ".") - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') - # extract gridcell closest to plon/plat - f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') - - # expand dimensions - f3 = f3.expand_dims(['lsmlat','lsmlon']) - # specify dimension order - #f3 = f3.transpose('time','lat','lon') - f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') - #f3['YEAR'] = f3['YEAR'].squeeze() - - # revert expand dimensions of YEAR - year = np.squeeze(np.asarray(f3['YEAR'])) - x = xr.DataArray(year, coords={'time':f3['time']}, dims='time', name='YEAR') - x.attrs['units']='unitless' - x.attrs['long_name']='Year of PFT data' - f3['YEAR'] = x - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fluse_in - - wfile = self.fluse_out - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (luse_out)'+self.fluse_out,".") - f2.close(); f3.close() - - def create_surfdata_at_point(self): - print( "----------------------------------------------------------------------") - print ("Creating surface dataset file at ", self.plon, self.plat, ".") - # create 1d coordinate variables to enable sel() method - filename = self.fsurf_in - f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') - # extract gridcell closest to plon/plat - f3 = 
f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['lsmlat','lsmlon']).copy(deep=True) - - # modify surface data properties - if self.overwrite_single_pft: - f3['PCT_NAT_PFT'][:,:,:] = 0 - f3['PCT_NAT_PFT'][:,:,self.dominant_pft] = 100 - if self.zero_nonveg_landunits: - f3['PCT_NATVEG'][:,:] = 100 - f3['PCT_CROP'][:,:] = 0 - f3['PCT_LAKE'][:,:] = 0. - f3['PCT_WETLAND'][:,:] = 0. - f3['PCT_URBAN'][:,:,] = 0. - f3['PCT_GLACIER'][:,:] = 0. - if self.uniform_snowpack: - f3['STD_ELEV'][:,:] = 20. - if self.no_saturation_excess: - f3['FMAX'][:,:] = 0. - - # specify dimension order - #f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') - f3 = f3.transpose(u'time', u'cft', u'lsmpft', u'natpft', u'nglcec', u'nglcecp1', u'nlevsoi', u'nlevurb', u'numrad', u'numurbl', 'lsmlat', 'lsmlon') - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fsurf_in - del(f3.attrs['History_Log']) - # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode='w') - print('Successfully created file (fsurf_out) :'+self.fsurf_out) - f2.close(); f3.close() - - def create_datmdomain_at_point(self): - print( "----------------------------------------------------------------------") - print("Creating DATM domain file at ", self.plon, self.plat, ".") - # create 1d coordinate variables to enable sel() method - filename = self.fdatmdomain_in - f2 = self.create_1d_coord(filename, 'xc','yc','ni','nj') - # extract gridcell closest to plon/plat - f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['nj','ni']) - wfile=self.fdatmdomain_out - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdatmdomain_in - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdatmdomain_out) :'+self.fdatmdomain_out) - f2.close(); f3.close() - - def extract_datm_at(self, file_in, file_out): - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(file_in, 'LONGXY','LATIXY','lon','lat') - # extract gridcell closest to plon/plat - f3 = f2.sel(lon=self.plon,lat=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['lat','lon']) - # specify dimension order - f3 = f3.transpose(u'scalar','time','lat','lon') - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = file_in - # mode 'w' overwrites file - f3.to_netcdf(path=file_out, mode='w') - print('Successfully created file :'+ file_out) - f2.close(); f3.close() - - def create_datm_at_point(self): - print( "----------------------------------------------------------------------") - print("Creating DATM files at ", self.plon, self.plat, ".") - #-- specify subdirectory names and filename prefixes - solrdir = 'Solar/' - precdir = 'Precip/' - tpqwldir = 'TPHWL/' - prectag = 'clmforc.GSWP3.c2011.0.5x0.5.Prec.' - solrtag = 'clmforc.GSWP3.c2011.0.5x0.5.Solr.' - tpqwtag = 'clmforc.GSWP3.c2011.0.5x0.5.TPQWL.' 
- - #-- create data files - infile=[] - outfile=[] - for y in range(self.datm_syr,self.datm_eyr+1): - ystr=str(y) - for m in range(1,13): - mstr=str(m) - if m < 10: - mstr='0'+mstr - - dtag=ystr+'-'+mstr - - fsolar=self.dir_input_datm+solrdir+solrtag+dtag+'.nc' - fsolar2=self.dir_output_datm+solrtag+self.tag+'.'+dtag+'.nc' - fprecip=self.dir_input_datm+precdir+prectag+dtag+'.nc' - fprecip2=self.dir_output_datm+prectag+self.tag+'.'+dtag+'.nc' - ftpqw=self.dir_input_datm+tpqwldir+tpqwtag+dtag+'.nc' - ftpqw2=self.dir_output_datm+tpqwtag+self.tag+'.'+dtag+'.nc' - - infile+=[fsolar,fprecip,ftpqw] - outfile+=[fsolar2,fprecip2,ftpqw2] - - nm=len(infile) - for n in range(nm): - print(outfile[n]) - file_in = infile[n] - file_out = outfile[n] - self.extract_datm_at(file_in, file_out) - - - print('All DATM files are created in: '+self.dir_output_datm) - -class RegionalCase (BaseCase): - """ - A case to encapsulate regional cases. - """ - - def __init__(self, lat1, lat2, lon1, lon2, reg_name, - create_domain, create_surfdata, create_landuse, create_datm): - super().__init__(create_domain, create_surfdata, create_landuse, create_datm) - self.lat1 = lat1 - self.lat2 = lat2 - self.lon1 = lon1 - self.lon2 = lon2 - self.reg_name = reg_name - - def create_tag(self): - if self.reg_name: - self.tag = self.reg_name - else: - self.tag=str(self.lon1)+'-'+str(self.lon2)+'_'+str(self.lat1)+'-'+str(self.lat2) - - def create_domain_at_reg (self): - #print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating domain file at region:", self.tag) - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') - lat = f2['lat'] - lon = f2['lon'] - # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(nj=yind,ni=xind) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdomain_in - - wfile=self.fdomain_out - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - - - def create_surfdata_at_reg(self): - #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating surface dataset file at region:", self.tag) - # create 1d coordinate variables to enable sel() method - filename = self.fsurf_in - f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') - lat = f2['lat'] - lon = f2['lon'] - # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(lsmlat=yind,lsmlon=xind) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fsurf_in - - # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode='w') - print('created file (fsurf_out)'+self.fsurf_out) - #f1.close(); - f2.close(); f3.close() - - - def create_landuse_at_reg (self): - #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating surface dataset file at region:",self.tag) - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') - lat = f2['lat'] - lon = f2['lon'] - # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= 
self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(lsmlat=yind,lsmlon=xind) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fluse_in - - wfile=self.fluse_out - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - def setup_logging(log_file, log_level): """ From 60d949497bb1dd033c96815498a291151141b5c9 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 7 Sep 2021 12:20:40 -0600 Subject: [PATCH 004/223] adding the top-level skeleton. --- tools/site_and_regional/subset_data | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/tools/site_and_regional/subset_data b/tools/site_and_regional/subset_data index 34399bdaea..d237d101e4 100755 --- a/tools/site_and_regional/subset_data +++ b/tools/site_and_regional/subset_data @@ -1,18 +1,23 @@ #!/usr/bin/env python3 """ +This is a just top-level skeleton script that calls +subset_data.py. +The original code (subset_data.py) is located under +python/ctsm folder. +For full instructions on how to run the code and different options, +please check python/ctsm/subset_data.py file. """ import os import sys -_CTSM_PYTHON = os.path.join(os.path.dirname(os.path.realpath(__file__)), - os.pardir, - os.pardir, - 'python') -#print (_CTSM_PYTHON) +# -- add python/ctsm to path +_CTSM_PYTHON = os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python" +) +# print (_CTSM_PYTHON) sys.path.insert(1, _CTSM_PYTHON) - from ctsm.subset_data import main if __name__ == "__main__": From 50f8727aab2e5a2e68ec8e5dcb6a749ab67aedc5 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 7 Sep 2021 12:47:23 -0600 Subject: [PATCH 005/223] adding classes for subset_data.py --- python/ctsm/site_and_regional/__init__.py | 0 python/ctsm/site_and_regional/base_case.py | 127 +++++++++ .../ctsm/site_and_regional/regional_case.py | 100 +++++++ .../site_and_regional/single_point_case.py | 244 ++++++++++++++++++ 4 files changed, 471 insertions(+) create mode 100644 python/ctsm/site_and_regional/__init__.py create mode 100644 python/ctsm/site_and_regional/base_case.py create mode 100644 python/ctsm/site_and_regional/regional_case.py create mode 100644 python/ctsm/site_and_regional/single_point_case.py diff --git a/python/ctsm/site_and_regional/__init__.py b/python/ctsm/site_and_regional/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py new file mode 100644 index 0000000000..eb4e48853c --- /dev/null +++ b/python/ctsm/site_and_regional/base_case.py @@ -0,0 +1,127 @@ +import os +import numpy as np +import xarray as xr +import subprocess + +from datetime import date +from getpass import getuser + +myname = getuser() + +class BaseCase : + """ + Parent class to SinglePointCase and RegionalCase + + ... 
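+    This class holds the common attributes and helper methods that are
+    shared by the SinglePointCase and RegionalCase subclasses.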
+
+    Attributes
+    ----------
+    create_domain : bool
+        flag for creating domain file
+    create_surfdata : bool
+        flag for creating surface dataset
+    create_landuse : bool
+        flag for creating landuse file
+    create_datm : bool
+        flag for creating DATM files
+
+    Methods
+    -------
+    create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim)
+        create 1d coordinate variables to enable sel() method
+
+    add_tag_to_filename(filename, tag)
+        add a tag and a time tag to a filename ending with
+        [._]cYYMMDD.nc or [._]YYMMDD.nc
+    """
+    def __init__(self, create_domain, create_surfdata, create_landuse, create_datm):
+        self.create_domain = create_domain
+        self.create_surfdata = create_surfdata
+        self.create_landuse = create_landuse
+        self.create_datm = create_datm
+
+    def __str__(self):
+        return str(self.__class__) + '\n' + '\n'.join((str(item) + ' = ' + str(self.__dict__[item])
+                                                       for item in sorted(self.__dict__)))
+
+    @staticmethod
+    def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim):
+        """
+        Create 1d coordinate variables for a netcdf file to enable the sel() method.
+
+        lon_varname : variable name that has 2d lon
+        lat_varname : variable name that has 2d lat
+        x_dim: dimension name in X -- lon
+        y_dim: dimension name in Y -- lat
+        """
+        print( "Open file: "+filename )
+        f1 = xr.open_dataset(filename)
+
+        # create 1d coordinate variables to enable sel() method
+        lon0 = np.asarray(f1[lon_varname][0,:])
+        lat0 = np.asarray(f1[lat_varname][:,0])
+        lon = xr.DataArray(lon0,name='lon',dims=x_dim,coords={x_dim:lon0})
+        lat = xr.DataArray(lat0,name='lat',dims=y_dim,coords={y_dim:lat0})
+
+        f2=f1.assign({'lon':lon,'lat':lat})
+
+        # reset_coords returns a new dataset, so capture the result
+        f2 = f2.reset_coords([lon_varname,lat_varname])
+        f1.close()
+        return f2
+
+    @staticmethod
+    def add_tag_to_filename(filename, tag):
+        """
+        Add a tag and replace the time tag of a filename.
+        Expects the file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc;
+        the tag is added just before that ending part and the ending
+        part is changed to the current time tag, e.g.
+        surfdata_..._c170824.nc becomes surfdata_..._<tag>_c<today>.nc
+        """
+        basename = os.path.basename(filename)
+        cend = -10
+        if ( basename[cend] == "c" ):
+            cend = cend - 1
+        if ( (basename[cend] != ".") and (basename[cend] != "_") ):
+            print ( "Trouble figuring out where to add tag to filename:"+filename )
+            os.abort()
+        today = date.today()
+        today_string = today.strftime("%y%m%d")
+        return( basename[:cend]+"_"+tag+"_c"+today_string +'.nc')
+
+    def update_metadata(self, nc):
+        #update attributes
+        today = date.today()
+        today_string = today.strftime("%Y-%m-%d")
+
+        #get git hash
+        sha = self.get_git_sha()
+
+        nc.attrs['Created_on'] = today_string
+        nc.attrs['Created_by'] = myname
+        nc.attrs['Created_with'] = os.path.abspath(__file__) + " -- "+sha
+
+        #delete unrelated attributes if they exist
+        del_attrs = ['source_code', 'SVN_url', 'hostname', 'history',
+                     'History_Log', 'Logname', 'Host', 'Version',
+                     'Compiler_Optimized']
+        attr_list = nc.attrs
+
+        for attr in del_attrs:
+            if attr in attr_list:
+                #print ("This attr should be deleted:", attr)
+                del(nc.attrs[attr])
+
+
+        #for attr, value in attr_list.items():
+        #    print (attr + " = "+str(value))
+
+
+    @staticmethod
+    def get_git_sha():
+        """
+        Returns Git short SHA for the current directory.
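+
+        A brief usage sketch (the hash below is illustrative; the real
+        value depends on the current checkout):
+
+        >>> BaseCase.get_git_sha()  # doctest: +SKIP
+        'abc1234'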
+ """ + try: + sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode() + except subprocess.CalledProcessError: + sha = "NOT-A-GIT-REPOSITORY" + return sha + diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py new file mode 100644 index 0000000000..c35422a54d --- /dev/null +++ b/python/ctsm/site_and_regional/regional_case.py @@ -0,0 +1,100 @@ +from ctsm.site_and_regional.base_case import BaseCase + +import numpy as np +import xarray as xr + +class RegionalCase (BaseCase): + """ + A case to encapsulate regional cases. + """ + + def __init__(self, lat1, lat2, lon1, lon2, reg_name, + create_domain, create_surfdata, create_landuse, create_datm): + super().__init__(create_domain, create_surfdata, create_landuse, create_datm) + self.lat1 = lat1 + self.lat2 = lat2 + self.lon1 = lon1 + self.lon2 = lon2 + self.reg_name = reg_name + + def create_tag(self): + if self.reg_name: + self.tag = self.reg_name + else: + self.tag=str(self.lon1)+'-'+str(self.lon2)+'_'+str(self.lat1)+'-'+str(self.lat2) + + def create_domain_at_reg (self): + #print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print ("Creating domain file at region:", self.tag) + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') + lat = f2['lat'] + lon = f2['lon'] + # subset longitude and latitude arrays + xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3=f2.isel(nj=yind,ni=xind) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fdomain_in + + wfile=self.fdomain_out + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (fdomain_out)'+self.fdomain_out) + f2.close(); f3.close() + + + + def create_surfdata_at_reg(self): + #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print ("Creating surface dataset file at region:", self.tag) + # create 1d coordinate variables to enable sel() method + filename = self.fsurf_in + f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') + lat = f2['lat'] + lon = f2['lon'] + # subset longitude and latitude arrays + xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3=f2.isel(lsmlat=yind,lsmlon=xind) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fsurf_in + + # mode 'w' overwrites file + f3.to_netcdf(path=self.fsurf_out, mode='w') + print('created file (fsurf_out)'+self.fsurf_out) + #f1.close(); + f2.close(); f3.close() + + + def create_landuse_at_reg (self): + #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print ("Creating surface dataset file at region:",self.tag) + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') + lat = f2['lat'] + lon = f2['lon'] + # subset longitude and latitude arrays + xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3=f2.isel(lsmlat=yind,lsmlon=xind) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fluse_in + + wfile=self.fluse_out + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created 
file (fdomain_out)'+self.fdomain_out) + f2.close(); f3.close() + + + + + + diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py new file mode 100644 index 0000000000..1fd5fbdcf7 --- /dev/null +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -0,0 +1,244 @@ +from ctsm.site_and_regional.base_case import BaseCase +import os +import numpy as np +import xarray as xr +from datetime import date + +class SinglePointCase (BaseCase): + """ + A case to encapsulate single point cases. + + ... + + Attributes + ---------- + plat : float + latitude + plon : float + longitude + site_name: str -- default = None + Site name + + Methods + ------- + create_tag + create a tag for single point which is the site name + or the "lon-lat" format if the site name does not exist. + + create_domain_at_point + Create domain file at a single point. + create_landuse_at_point: + Create landuse file at a single point. + create_surfdata_at_point: + Create surface dataset at a single point. + create_datmdomain_at_point: + Create DATM domain file at a single point. + """ + + def __init__(self, plat, plon,site_name, + create_domain, create_surfdata, create_landuse, create_datm, + overwrite_single_pft, dominant_pft, zero_nonveg_landunits, + uniform_snowpack, no_saturation_excess): + super().__init__(create_domain, create_surfdata, create_landuse, create_datm) + self.plat = plat + self.plon = plon + self.site_name = site_name + self.overwrite_single_pft = overwrite_single_pft + self.dominant_pft = dominant_pft + self.zero_nonveg_landunits = zero_nonveg_landunits + self.uniform_snowpack = uniform_snowpack + self.no_saturation_excess = no_saturation_excess + + def create_tag(self): + if self.site_name: + self.tag = self.site_name + else: + self.tag=str(self.plon)+'_'+str(self.plat) + + @staticmethod + def create_fileout_name( filename,tag): + + basename = os.path.basename(filename) + items = basename.split('_') + today = date.today() + today_string = today.strftime("%y%m%d") + new_string = items[0]+"_"+items[2]+"_"+items[3]+"_"+ items[4] \ + +"_"+items[5]+"_"+items[6]+"_"+tag+"_c"+today_string+".nc" + return new_string + + def create_domain_at_point (self): + print( "----------------------------------------------------------------------") + print ("Creating domain file at ", self.plon, self.plat) + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') + # extract gridcell closest to plon/plat + f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['nj','ni']) + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fdomain_in + + wfile=self.fdomain_out + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (fdomain_out)'+self.fdomain_out) + f2.close(); f3.close() + + + + def create_landuse_at_point (self): + print( "----------------------------------------------------------------------") + print ("Creating landuse file at ", self.plon, self.plat, ".") + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') + # extract gridcell closest to plon/plat + f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') + + # expand dimensions + f3 = f3.expand_dims(['lsmlat','lsmlon']) + # specify dimension order + #f3 = f3.transpose('time','lat','lon') + f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') + 
#f3['YEAR'] = f3['YEAR'].squeeze() + + # revert expand dimensions of YEAR + year = np.squeeze(np.asarray(f3['YEAR'])) + x = xr.DataArray(year, coords={'time':f3['time']}, dims='time', name='YEAR') + x.attrs['units']='unitless' + x.attrs['long_name']='Year of PFT data' + f3['YEAR'] = x + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fluse_in + + wfile = self.fluse_out + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (luse_out)'+self.fluse_out,".") + f2.close(); f3.close() + + + def create_surfdata_at_point(self): + print( "----------------------------------------------------------------------") + print ("Creating surface dataset file at ", self.plon, self.plat, ".") + # create 1d coordinate variables to enable sel() method + filename = self.fsurf_in + f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') + # extract gridcell closest to plon/plat + f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['lsmlat','lsmlon']).copy(deep=True) + + # modify surface data properties + if self.overwrite_single_pft: + f3['PCT_NAT_PFT'][:,:,:] = 0 + f3['PCT_NAT_PFT'][:,:,self.dominant_pft] = 100 + if self.zero_nonveg_landunits: + f3['PCT_NATVEG'][:,:] = 100 + f3['PCT_CROP'][:,:] = 0 + f3['PCT_LAKE'][:,:] = 0. + f3['PCT_WETLAND'][:,:] = 0. + f3['PCT_URBAN'][:,:,] = 0. + f3['PCT_GLACIER'][:,:] = 0. + if self.uniform_snowpack: + f3['STD_ELEV'][:,:] = 20. + if self.no_saturation_excess: + f3['FMAX'][:,:] = 0. + + # specify dimension order + #f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') + f3 = f3.transpose(u'time', u'cft', u'lsmpft', u'natpft', u'nglcec', u'nglcecp1', u'nlevsoi', u'nlevurb', u'numrad', u'numurbl', 'lsmlat', 'lsmlon') + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fsurf_in + del(f3.attrs['History_Log']) + # mode 'w' overwrites file + f3.to_netcdf(path=self.fsurf_out, mode='w') + print('Successfully created file (fsurf_out) :'+self.fsurf_out) + f2.close(); f3.close() + + def create_datmdomain_at_point(self): + print( "----------------------------------------------------------------------") + print("Creating DATM domain file at ", self.plon, self.plat, ".") + # create 1d coordinate variables to enable sel() method + filename = self.fdatmdomain_in + f2 = self.create_1d_coord(filename, 'xc','yc','ni','nj') + # extract gridcell closest to plon/plat + f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['nj','ni']) + wfile=self.fdatmdomain_out + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = self.fdatmdomain_in + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode='w') + print('Successfully created file (fdatmdomain_out) :'+self.fdatmdomain_out) + f2.close(); f3.close() + + def extract_datm_at(self, file_in, file_out): + # create 1d coordinate variables to enable sel() method + f2 = self.create_1d_coord(file_in, 'LONGXY','LATIXY','lon','lat') + # extract gridcell closest to plon/plat + f3 = f2.sel(lon=self.plon,lat=self.plat,method='nearest') + # expand dimensions + f3 = f3.expand_dims(['lat','lon']) + # specify dimension order + f3 = f3.transpose(u'scalar','time','lat','lon') + + #update attributes + self.update_metadata(f3) + f3.attrs['Created_from'] = file_in + # mode 'w' overwrites file + f3.to_netcdf(path=file_out, mode='w') + print('Successfully created file :'+ file_out) + f2.close(); 
f3.close() + + def create_datm_at_point(self): + print( "----------------------------------------------------------------------") + print("Creating DATM files at ", self.plon, self.plat, ".") + #-- specify subdirectory names and filename prefixes + solrdir = 'Solar/' + precdir = 'Precip/' + tpqwldir = 'TPHWL/' + prectag = 'clmforc.GSWP3.c2011.0.5x0.5.Prec.' + solrtag = 'clmforc.GSWP3.c2011.0.5x0.5.Solr.' + tpqwtag = 'clmforc.GSWP3.c2011.0.5x0.5.TPQWL.' + + #-- create data files + infile=[] + outfile=[] + for y in range(self.datm_syr,self.datm_eyr+1): + ystr=str(y) + for m in range(1,13): + mstr=str(m) + if m < 10: + mstr='0'+mstr + + dtag=ystr+'-'+mstr + + fsolar=self.dir_input_datm+solrdir+solrtag+dtag+'.nc' + fsolar2=self.dir_output_datm+solrtag+self.tag+'.'+dtag+'.nc' + fprecip=self.dir_input_datm+precdir+prectag+dtag+'.nc' + fprecip2=self.dir_output_datm+prectag+self.tag+'.'+dtag+'.nc' + ftpqw=self.dir_input_datm+tpqwldir+tpqwtag+dtag+'.nc' + ftpqw2=self.dir_output_datm+tpqwtag+self.tag+'.'+dtag+'.nc' + + infile+=[fsolar,fprecip,ftpqw] + outfile+=[fsolar2,fprecip2,ftpqw2] + + nm=len(infile) + for n in range(nm): + print(outfile[n]) + file_in = infile[n] + file_out = outfile[n] + self.extract_datm_at(file_in, file_out) + + + print('All DATM files are created in: '+self.dir_output_datm) + + + From 093a0ed3958e1ad40bd1d1c5f8afef25ec00569f Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 7 Sep 2021 12:48:13 -0600 Subject: [PATCH 006/223] moving this under site_and_regional. --- python/ctsm/base_case.py | 127 ---------------- python/ctsm/regional_case.py | 100 ------------- python/ctsm/single_point_case.py | 244 ------------------------------- 3 files changed, 471 deletions(-) delete mode 100644 python/ctsm/base_case.py delete mode 100644 python/ctsm/regional_case.py delete mode 100644 python/ctsm/single_point_case.py diff --git a/python/ctsm/base_case.py b/python/ctsm/base_case.py deleted file mode 100644 index eb4e48853c..0000000000 --- a/python/ctsm/base_case.py +++ /dev/null @@ -1,127 +0,0 @@ -import os -import numpy as np -import xarray as xr -import subprocess - -from datetime import date -from getpass import getuser - -myname = getuser() - -class BaseCase : - """ - Parent class to SinglePointCase and RegionalCase - - ... 
- - Attributes - ---------- - create_domain : bool - flag for creating domain file - create_surfdata : bool - flag for creating surface dataset - create_landuse : bool - flag for creating landuse file - create_datm : bool - flag for creating DATM files - - Methods - ------- - create_1d_coord(filename, lon_varname , lat_varname,x_dim , y_dim ) - create 1d coordinate variables to enable sel() method - - add_tag_to_filename(filename, tag) - add a tag and timetag to a filename ending with - [._]cYYMMDD.nc or [._]YYMMDD.nc - """ - def __init__(self, create_domain, create_surfdata, create_landuse, create_datm): - self.create_domain = create_domain - self.create_surfdata = create_surfdata - self.create_landuse = create_landuse - self.create_datm = create_datm - - def __str__(self): - return str(self.__class__) + '\n' + '\n'.join((str(item) + ' = ' + str(self.__dict__[item]) - for item in sorted(self.__dict__))) - - @staticmethod - def create_1d_coord(filename, lon_varname , lat_varname , x_dim , y_dim): - """ - lon_varname : variable name that has 2d lon - lat_varname : variable name that has 2d lat - x_dim: dimension name in X -- lon - y_dim: dimension name in Y -- lat - """ - print( "Open file: "+filename ) - f1 = xr.open_dataset(filename) - - # create 1d coordinate variables to enable sel() method - lon0 = np.asarray(f1[lon_varname][0,:]) - lat0 = np.asarray(f1[lat_varname][:,0]) - lon = xr.DataArray(lon0,name='lon',dims=x_dim,coords={x_dim:lon0}) - lat = xr.DataArray(lat0,name='lat',dims=y_dim,coords={y_dim:lat0}) - - f2=f1.assign({'lon':lon,'lat':lat}) - - f2.reset_coords([lon_varname,lat_varname]) - f1.close() - return f2 - - @staticmethod - def add_tag_to_filename(filename, tag): - """ - Add a tag and replace timetag of a filename - # Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc - # Add the tag to just before that ending part - # and change the ending part to the current time tag - """ - basename = os.path.basename(filename) - cend = -10 - if ( basename[cend] == "c" ): - cend = cend - 1 - if ( (basename[cend] != ".") and (basename[cend] != "_") ): - print ( "Trouble figuring out where to add tag to filename:"+filename ) - os.abort() - today = date.today() - today_string = today.strftime("%y%m%d") - return( basename[:cend]+"_"+tag+"_c"+today_string +'.nc') - - def update_metadata(self, nc): - #update attributes - today = date.today() - today_string = today.strftime("%Y-%m-%d") - - #get git hash - sha = self.get_git_sha() - - nc.attrs['Created_on'] = today_string - nc.attrs['Created_by'] = myname - nc.attrs['Created_with'] = os.path.abspath(__file__) + " -- "+sha - - #delete unrelated attributes if they exist - del_attrs = ['source_code', 'SVN_url', 'hostname', 'history' - 'History_Log', 'Logname', 'Host', 'Version', - 'Compiler_Optimized'] - attr_list = nc.attrs - - for attr in del_attrs: - if attr in attr_list: - #print ("This attr should be deleted:", attr) - del(nc.attrs[attr]) - - - #for attr, value in attr_list.items(): - # print (attr + " = "+str(value)) - - - @staticmethod - def get_git_sha(): - """ - Returns Git short SHA for the currect directory. 
- """ - try: - sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode() - except subprocess.CalledProcessError: - sha = "NOT-A-GIT-REPOSITORY" - return sha - diff --git a/python/ctsm/regional_case.py b/python/ctsm/regional_case.py deleted file mode 100644 index 2ff633f23a..0000000000 --- a/python/ctsm/regional_case.py +++ /dev/null @@ -1,100 +0,0 @@ -from base_case import BaseCase - -import numpy as np -import xarray as xr - -class RegionalCase (BaseCase): - """ - A case to encapsulate regional cases. - """ - - def __init__(self, lat1, lat2, lon1, lon2, reg_name, - create_domain, create_surfdata, create_landuse, create_datm): - super().__init__(create_domain, create_surfdata, create_landuse, create_datm) - self.lat1 = lat1 - self.lat2 = lat2 - self.lon1 = lon1 - self.lon2 = lon2 - self.reg_name = reg_name - - def create_tag(self): - if self.reg_name: - self.tag = self.reg_name - else: - self.tag=str(self.lon1)+'-'+str(self.lon2)+'_'+str(self.lat1)+'-'+str(self.lat2) - - def create_domain_at_reg (self): - #print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating domain file at region:", self.tag) - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') - lat = f2['lat'] - lon = f2['lon'] - # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(nj=yind,ni=xind) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdomain_in - - wfile=self.fdomain_out - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - - - - def create_surfdata_at_reg(self): - #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating surface dataset file at region:", self.tag) - # create 1d coordinate variables to enable sel() method - filename = self.fsurf_in - f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') - lat = f2['lat'] - lon = f2['lon'] - # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(lsmlat=yind,lsmlon=xind) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fsurf_in - - # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode='w') - print('created file (fsurf_out)'+self.fsurf_out) - #f1.close(); - f2.close(); f3.close() - - - def create_landuse_at_reg (self): - #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating surface dataset file at region:",self.tag) - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') - lat = f2['lat'] - lon = f2['lon'] - # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(lsmlat=yind,lsmlon=xind) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fluse_in - - wfile=self.fluse_out - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - - - - - 
- diff --git a/python/ctsm/single_point_case.py b/python/ctsm/single_point_case.py deleted file mode 100644 index 3d56ee7827..0000000000 --- a/python/ctsm/single_point_case.py +++ /dev/null @@ -1,244 +0,0 @@ -from base_case import BaseCase -import os -import numpy as np -import xarray as xr -from datetime import date - -class SinglePointCase (BaseCase): - """ - A case to encapsulate single point cases. - - ... - - Attributes - ---------- - plat : float - latitude - plon : float - longitude - site_name: str -- default = None - Site name - - Methods - ------- - create_tag - create a tag for single point which is the site name - or the "lon-lat" format if the site name does not exist. - - create_domain_at_point - Create domain file at a single point. - create_landuse_at_point: - Create landuse file at a single point. - create_surfdata_at_point: - Create surface dataset at a single point. - create_datmdomain_at_point: - Create DATM domain file at a single point. - """ - - def __init__(self, plat, plon,site_name, - create_domain, create_surfdata, create_landuse, create_datm, - overwrite_single_pft, dominant_pft, zero_nonveg_landunits, - uniform_snowpack, no_saturation_excess): - super().__init__(create_domain, create_surfdata, create_landuse, create_datm) - self.plat = plat - self.plon = plon - self.site_name = site_name - self.overwrite_single_pft = overwrite_single_pft - self.dominant_pft = dominant_pft - self.zero_nonveg_landunits = zero_nonveg_landunits - self.uniform_snowpack = uniform_snowpack - self.no_saturation_excess = no_saturation_excess - - def create_tag(self): - if self.site_name: - self.tag = self.site_name - else: - self.tag=str(self.plon)+'_'+str(self.plat) - - @staticmethod - def create_fileout_name( filename,tag): - - basename = os.path.basename(filename) - items = basename.split('_') - today = date.today() - today_string = today.strftime("%y%m%d") - new_string = items[0]+"_"+items[2]+"_"+items[3]+"_"+ items[4] \ - +"_"+items[5]+"_"+items[6]+"_"+tag+"_c"+today_string+".nc" - return new_string - - def create_domain_at_point (self): - print( "----------------------------------------------------------------------") - print ("Creating domain file at ", self.plon, self.plat) - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') - # extract gridcell closest to plon/plat - f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['nj','ni']) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdomain_in - - wfile=self.fdomain_out - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - - - - def create_landuse_at_point (self): - print( "----------------------------------------------------------------------") - print ("Creating landuse file at ", self.plon, self.plat, ".") - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') - # extract gridcell closest to plon/plat - f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') - - # expand dimensions - f3 = f3.expand_dims(['lsmlat','lsmlon']) - # specify dimension order - #f3 = f3.transpose('time','lat','lon') - f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') - #f3['YEAR'] = f3['YEAR'].squeeze() - - # revert expand dimensions of YEAR - year = np.squeeze(np.asarray(f3['YEAR'])) - x = xr.DataArray(year, 
coords={'time':f3['time']}, dims='time', name='YEAR') - x.attrs['units']='unitless' - x.attrs['long_name']='Year of PFT data' - f3['YEAR'] = x - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fluse_in - - wfile = self.fluse_out - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (luse_out)'+self.fluse_out,".") - f2.close(); f3.close() - - - def create_surfdata_at_point(self): - print( "----------------------------------------------------------------------") - print ("Creating surface dataset file at ", self.plon, self.plat, ".") - # create 1d coordinate variables to enable sel() method - filename = self.fsurf_in - f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') - # extract gridcell closest to plon/plat - f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['lsmlat','lsmlon']).copy(deep=True) - - # modify surface data properties - if self.overwrite_single_pft: - f3['PCT_NAT_PFT'][:,:,:] = 0 - f3['PCT_NAT_PFT'][:,:,self.dominant_pft] = 100 - if self.zero_nonveg_landunits: - f3['PCT_NATVEG'][:,:] = 100 - f3['PCT_CROP'][:,:] = 0 - f3['PCT_LAKE'][:,:] = 0. - f3['PCT_WETLAND'][:,:] = 0. - f3['PCT_URBAN'][:,:,] = 0. - f3['PCT_GLACIER'][:,:] = 0. - if self.uniform_snowpack: - f3['STD_ELEV'][:,:] = 20. - if self.no_saturation_excess: - f3['FMAX'][:,:] = 0. - - # specify dimension order - #f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') - f3 = f3.transpose(u'time', u'cft', u'lsmpft', u'natpft', u'nglcec', u'nglcecp1', u'nlevsoi', u'nlevurb', u'numrad', u'numurbl', 'lsmlat', 'lsmlon') - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fsurf_in - del(f3.attrs['History_Log']) - # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode='w') - print('Successfully created file (fsurf_out) :'+self.fsurf_out) - f2.close(); f3.close() - - def create_datmdomain_at_point(self): - print( "----------------------------------------------------------------------") - print("Creating DATM domain file at ", self.plon, self.plat, ".") - # create 1d coordinate variables to enable sel() method - filename = self.fdatmdomain_in - f2 = self.create_1d_coord(filename, 'xc','yc','ni','nj') - # extract gridcell closest to plon/plat - f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['nj','ni']) - wfile=self.fdatmdomain_out - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdatmdomain_in - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdatmdomain_out) :'+self.fdatmdomain_out) - f2.close(); f3.close() - - def extract_datm_at(self, file_in, file_out): - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(file_in, 'LONGXY','LATIXY','lon','lat') - # extract gridcell closest to plon/plat - f3 = f2.sel(lon=self.plon,lat=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['lat','lon']) - # specify dimension order - f3 = f3.transpose(u'scalar','time','lat','lon') - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = file_in - # mode 'w' overwrites file - f3.to_netcdf(path=file_out, mode='w') - print('Successfully created file :'+ file_out) - f2.close(); f3.close() - - def create_datm_at_point(self): - print( "----------------------------------------------------------------------") - print("Creating 
DATM files at ", self.plon, self.plat, ".") - #-- specify subdirectory names and filename prefixes - solrdir = 'Solar/' - precdir = 'Precip/' - tpqwldir = 'TPHWL/' - prectag = 'clmforc.GSWP3.c2011.0.5x0.5.Prec.' - solrtag = 'clmforc.GSWP3.c2011.0.5x0.5.Solr.' - tpqwtag = 'clmforc.GSWP3.c2011.0.5x0.5.TPQWL.' - - #-- create data files - infile=[] - outfile=[] - for y in range(self.datm_syr,self.datm_eyr+1): - ystr=str(y) - for m in range(1,13): - mstr=str(m) - if m < 10: - mstr='0'+mstr - - dtag=ystr+'-'+mstr - - fsolar=self.dir_input_datm+solrdir+solrtag+dtag+'.nc' - fsolar2=self.dir_output_datm+solrtag+self.tag+'.'+dtag+'.nc' - fprecip=self.dir_input_datm+precdir+prectag+dtag+'.nc' - fprecip2=self.dir_output_datm+prectag+self.tag+'.'+dtag+'.nc' - ftpqw=self.dir_input_datm+tpqwldir+tpqwtag+dtag+'.nc' - ftpqw2=self.dir_output_datm+tpqwtag+self.tag+'.'+dtag+'.nc' - - infile+=[fsolar,fprecip,ftpqw] - outfile+=[fsolar2,fprecip2,ftpqw2] - - nm=len(infile) - for n in range(nm): - print(outfile[n]) - file_in = infile[n] - file_out = outfile[n] - self.extract_datm_at(file_in, file_out) - - - print('All DATM files are created in: '+self.dir_output_datm) - - - From b4bb64ed1d8903a7e521c88a742d40b76fc046c7 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 7 Sep 2021 12:49:12 -0600 Subject: [PATCH 007/223] moving the classes under subset_data. --- python/ctsm/subset_data.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index d0d2726d07..f02fab3211 100755 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -88,9 +88,17 @@ from getpass import getuser from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter -from base_case import BaseCase -from single_point_case import SinglePointCase -from regional_case import RegionalCase +# -- add python/ctsm to path +_CTSM_PYTHON = os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python" + ) +# print (_CTSM_PYTHON) +sys.path.insert(1, _CTSM_PYTHON) + + +from ctsm.site_and_regional.base_case import BaseCase +from ctsm.site_and_regional.single_point_case import SinglePointCase +from ctsm.site_and_regional.regional_case import RegionalCase myname = getuser() @@ -532,6 +540,7 @@ def main (): print (single_point) + #output_to_logger (single_point) if crop_flag: num_pft = "78" From bf8026a8af0f1edeb89e4575f10c31d05542a19c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 7 Sep 2021 12:49:49 -0600 Subject: [PATCH 008/223] adding some more instructions... --- tools/site_and_regional/subset_data | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tools/site_and_regional/subset_data b/tools/site_and_regional/subset_data index d237d101e4..0c4cb9f28d 100755 --- a/tools/site_and_regional/subset_data +++ b/tools/site_and_regional/subset_data @@ -6,6 +6,16 @@ The original code (subset_data.py) is located under python/ctsm folder. For full instructions on how to run the code and different options, please check python/ctsm/subset_data.py file. + +This script extracts domain files, surface dataset, and DATM files +at either a single point or a region using the global dataset. 
+ + + +To see all available options for single-point subsetting: + ./subset_data point --help +To see all available options for region subsetting: + ./subset_data region --help """ import os From 1f45c144f6cc2fa2e77b5ee83b4bd3b2b87e88fa Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 7 Sep 2021 12:52:02 -0600 Subject: [PATCH 009/223] removing subset_data.py under site_and_regional...why not git mv :/ --- tools/site_and_regional/subset_data.py | 1146 ------------------------ 1 file changed, 1146 deletions(-) delete mode 100755 tools/site_and_regional/subset_data.py diff --git a/tools/site_and_regional/subset_data.py b/tools/site_and_regional/subset_data.py deleted file mode 100755 index 8b26e24f35..0000000000 --- a/tools/site_and_regional/subset_data.py +++ /dev/null @@ -1,1146 +0,0 @@ -#! /usr/bin/env python -""" -|------------------------------------------------------------------| -|--------------------- Instructions -----------------------------| -|------------------------------------------------------------------| - -Instructions for running on Cheyenne/Casper: - -load the following into your local environment - module load python - ncar_pylib - -------------------------------------------------------------------- -To see the available options for single point cases: - ./subset_data.py point --help - -To see the available options for regional cases: - ./subset_data.py reg --help -------------------------------------------------------------------- - -This script extracts domain files, surface dataset, and DATM files -at either a single point or a region using the global dataset. - -After creating a case using a global compset, run preview_namelist. -From the resulting lnd_in file in the run directory, find the name -of the domain file, and the surface data file. -From the datm streams files (e.g. datm.streams.txt.CLMGSWP3v1.Precip) -find the name of the datm forcing data domain file and forcing files. -Use these file names as the sources for the single point/regional -files to be created (see below). - -After running this script, point to the new CLM domain and surface -dataset using the user_nl_clm file in the case directory. In addition, -copy the datm.streams files to the case directory, with the prefix -'user_', e.g. user_datm.streams.txt.CLMGSWP3v1.Precip. Change the -information in the user_datm.streams* files to point to the single -point datm data (domain and forcing files) created using this script. - -The domain file is not set via user_nl_clm, but requires changing -LND_DOMAIN and ATM_DOMAIN (and their paths) in env_run.xml. - -Using single point forcing data requires specifying the nearest -neighbor mapping algorithm for the datm streams (usually they are -the first three in the list) in user_nl_datm: mapalgo = 'nn','nn','nn', -..., where the '...' can still be 'bilinear', etc, depending on the -other streams that are being used, e.g. aerosols, anomaly forcing, -bias correction. - -The file env_mach_pes.xml should be modified to specify a single -processor. The mpi-serial libraries should also be used, and can be -set in env_build.xml by changing "MPILIB" to "mpi-serial" prior to -setting up the case. - -The case for the single point simulation should have river routing -and land ice models turned off (i.e. 
the compset should use stub -models SROF and SGLC) - -------------------------------------------------------------------- -To run the script for a single point: - ./subset_data.py point - -To run the script for a region: - ./subset_data.py reg - -To remove NPL from your environment on Cheyenne/Casper: - deactivate -------------------------------------------------------------------- - -""" -# TODO -# Automatic downloading of missing files if they are missing -# default 78 pft vs 16 pft - -# Import libraries -from __future__ import print_function - -import sys -import os -import string -import logging -import subprocess -import argparse - -import numpy as np -import xarray as xr - -from datetime import date -from getpass import getuser -from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter - -myname = getuser() - -def get_parser(): - """Get parser object for this script.""" - #parser = ArgumentParser(description=__doc__, - # formatter_class=ArgumentDefaultsHelpFormatter) - parser = ArgumentParser(description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - - parser.print_usage = parser.print_help - subparsers = parser.add_subparsers( - help='Two possible ways to run this sript, either:', - dest ='run_type') - pt_parser = subparsers.add_parser('point', - help = 'Run script for a single point.') - rg_parser = subparsers.add_parser('reg', - help = 'Run script for a region.') - - - pt_parser.add_argument('--lat', - help='Single point latitude. [default: %(default)s]', - action="store", - dest="plat", - required=False, - type = plat_type, - default=42.5) - pt_parser.add_argument('--lon', - help='Single point longitude. [default: %(default)s]', - action="store", - dest="plon", - required=False, - type = plon_type, - default= 287.8 ) - pt_parser.add_argument('--site', - help='Site name or tag. [default: %(default)s]', - action="store", - dest="site_name", - required = False, - type = str, - default = '') - pt_parser.add_argument('--create_domain', - help='Flag for creating CLM domain file at single point. [default: %(default)s]', - action="store", - dest="create_domain", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - pt_parser.add_argument('--create_surface', - help='Flag for creating surface data file at single point. [default: %(default)s]', - action="store", - dest="create_surfdata", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = True) - pt_parser.add_argument('--create_landuse', - help='Flag for creating landuse data file at single point. [default: %(default)s]', - action="store", - dest="create_landuse", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - pt_parser.add_argument('--create_datm', - help='Flag for creating DATM forcing data at single point. [default: %(default)s]', - action="store", - dest="create_datm", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - pt_parser.add_argument('--datm_syr', - help='Start year for creating DATM forcing at single point. [default: %(default)s]', - action="store", - dest="datm_syr", - required = False, - type = int, - default = 1901) - pt_parser.add_argument('--datm_eyr', - help='End year for creating DATM forcing at single point. [default: %(default)s]', - action="store", - dest="datm_eyr", - required = False, - type = int, - default = 2014) - pt_parser.add_argument('--crop', - help='Create datasets using the extensive list of prognostic crop types. 
[default: %(default)s]', - action="store_true", - dest="crop_flag", - default=False) - pt_parser.add_argument('--dompft', - help='Dominant PFT type . [default: %(default)s] ', - action="store", - dest="dom_pft", - type =int, - default=7) - pt_parser.add_argument('--no-unisnow', - help='Turn off the flag for create uniform snowpack. [default: %(default)s]', - action="store_false", - dest="uni_snow", - default=True) - pt_parser.add_argument('--no-overwrite_single_pft', - help='Turn off the flag for making the whole grid 100%% single PFT. [default: %(default)s]', - action="store_false", - dest="overwrite_single_pft", - default=True) - pt_parser.add_argument('--zero_nonveg', - help='Set all non-vegetation landunits to zero. [default: %(default)s]', - action="store", - dest="zero_nonveg", - type =bool, - default=True) - pt_parser.add_argument('--no_saturation_excess', - help='Turn off the flag for saturation excess. [default: %(default)s]', - action="store", - dest="no_saturation_excess", - type =bool, - default=True) - pt_parser.add_argument('--outdir', - help='Output directory. [default: %(default)s]', - action="store", - dest="out_dir", - type =str, - default="/glade/scratch/"+myname+"/single_point/") - - rg_parser.add_argument('--lat1', - help='Region start latitude. [default: %(default)s]', - action="store", - dest="lat1", - required=False, - type = plat_type, - default=-40) - rg_parser.add_argument('--lat2', - help='Region end latitude. [default: %(default)s]', - action="store", - dest="lat2", - required=False, - type = plat_type, - default=15) - rg_parser.add_argument('--lon1', - help='Region start longitude. [default: %(default)s]', - action="store", - dest="lon1", - required=False, - type = plon_type, - default= 275. ) - rg_parser.add_argument('--lon2', - help='Region end longitude. [default: %(default)s]', - action="store", - dest="lon2", - required=False, - type = plon_type, - default= 330. ) - rg_parser.add_argument('--reg', - help='Region name or tag. [default: %(default)s]', - action="store", - dest="reg_name", - required = False, - type = str, - default = '') - rg_parser.add_argument('--create_domain', - help='Flag for creating CLM domain file for a region. [default: %(default)s]', - action="store", - dest="create_domain", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - rg_parser.add_argument('--create_surface', - help='Flag for creating surface data file for a region. [default: %(default)s]', - action="store", - dest="create_surfdata", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = True) - rg_parser.add_argument('--create_landuse', - help='Flag for creating landuse data file for a region. [default: %(default)s]', - action="store", - dest="create_landuse", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - rg_parser.add_argument('--create_datm', - help='Flag for creating DATM forcing data for a region. [default: %(default)s]', - action="store", - dest="create_datm", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - rg_parser.add_argument('--datm_syr', - help='Start year for creating DATM forcing for a region. [default: %(default)s]', - action="store", - dest="datm_syr", - required = False, - type = int, - default = 1901) - rg_parser.add_argument('--datm_eyr', - help='End year for creating DATM forcing for a region. 
[default: %(default)s]', - action="store", - dest="datm_eyr", - required = False, - type = int, - default = 2014) - rg_parser.add_argument('--crop', - help='Create datasets using the extensive list of prognostic crop types. [default: %(default)s]', - action="store_true", - dest="crop_flag", - default=False) - rg_parser.add_argument('--dompft', - help='Dominant PFT type . [default: %(default)s] ', - action="store", - dest="dom_pft", - type =int, - default=7) - rg_parser.add_argument('--outdir', - help='Output directory. [default: %(default)s]', - action="store", - dest="out_dir", - type =str, - default="/glade/scratch/"+myname+"/regional/") - - return parser - -def str2bool(v): - """ - Function for converting different forms of - command line boolean strings to boolean value. - - Args: - v (str): String bool input - - Raises: - if the argument is not an acceptable boolean string - (such as yes or no ; true or false ; y or n ; t or f ; 0 or 1). - argparse.ArgumentTypeError: The string should be one of the mentioned values. - - Returns: - bool: Boolean value corresponding to the input. - """ - if isinstance(v, bool): - return v - if v.lower() in ('yes', 'true', 't', 'y', '1'): - return True - elif v.lower() in ('no', 'false', 'f', 'n', '0'): - return False - else: - raise argparse.ArgumentTypeError('Boolean value expected. [true or false] or [y or n]') - - -def plat_type(x): - """ - Function to define lat type for the parser - and - raise error if latitude is not between -90 and 90. - """ - x = float(x) - if (x < -90) or (x > 90): - raise argparse.ArgumentTypeError("ERROR: Latitude should be between -90 and 90.") - return x - - -def plon_type(x): - """ - Function to define lon type for the parser and - convert negative longitudes and - raise error if lon is not between -180 and 360. - """ - x = float(x) - if (-180 < x) and (x < 0): - print ("lon is :", lon) - x= x%360 - print ("after modulo lon is :", lon) - if (x < 0) or (x > 360): - raise argparse.ArgumentTypeError("ERROR: Latitude of single point should be between 0 and 360 or -180 and 180.") - return x - -def get_git_sha(): - """ - Returns Git short SHA for the currect directory. - """ - try: - sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode() - except subprocess.CalledProcessError: - sha = "NOT-A-GIT-REPOSITORY" - return sha - -class BaseCase : - """ - Parent class to SinglePointCase and RegionalCase - - ... 
- - Attributes - ---------- - create_domain : bool - flag for creating domain file - create_surfdata : bool - flag for creating surface dataset - create_landuse : bool - flag for creating landuse file - create_datm : bool - flag for creating DATM files - - Methods - ------- - create_1d_coord(filename, lon_varname , lat_varname,x_dim , y_dim ) - create 1d coordinate variables to enable sel() method - - add_tag_to_filename(filename, tag) - add a tag and timetag to a filename ending with - [._]cYYMMDD.nc or [._]YYMMDD.nc - """ - def __init__(self, create_domain, create_surfdata, create_landuse, create_datm): - self.create_domain = create_domain - self.create_surfdata = create_surfdata - self.create_landuse = create_landuse - self.create_datm = create_datm - - def __str__(self): - return str(self.__class__) + '\n' + '\n'.join((str(item) + ' = ' + str(self.__dict__[item]) - for item in sorted(self.__dict__))) - - @staticmethod - def create_1d_coord(filename, lon_varname , lat_varname , x_dim , y_dim): - """ - lon_varname : variable name that has 2d lon - lat_varname : variable name that has 2d lat - x_dim: dimension name in X -- lon - y_dim: dimension name in Y -- lat - """ - print( "Open file: "+filename ) - f1 = xr.open_dataset(filename) - - # create 1d coordinate variables to enable sel() method - lon0 = np.asarray(f1[lon_varname][0,:]) - lat0 = np.asarray(f1[lat_varname][:,0]) - lon = xr.DataArray(lon0,name='lon',dims=x_dim,coords={x_dim:lon0}) - lat = xr.DataArray(lat0,name='lat',dims=y_dim,coords={y_dim:lat0}) - - f2=f1.assign({'lon':lon,'lat':lat}) - - f2.reset_coords([lon_varname,lat_varname]) - f1.close() - return f2 - - @staticmethod - def add_tag_to_filename(filename, tag): - """ - Add a tag and replace timetag of a filename - # Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc - # Add the tag to just before that ending part - # and change the ending part to the current time tag - """ - basename = os.path.basename(filename) - cend = -10 - if ( basename[cend] == "c" ): - cend = cend - 1 - if ( (basename[cend] != ".") and (basename[cend] != "_") ): - print ( "Trouble figuring out where to add tag to filename:"+filename ) - os.abort() - today = date.today() - today_string = today.strftime("%y%m%d") - return( basename[:cend]+"_"+tag+"_c"+today_string +'.nc') - - @staticmethod - def update_metadata(nc): - #update attributes - today = date.today() - today_string = today.strftime("%Y-%m-%d") - - #get git hash - sha = get_git_sha() - - nc.attrs['Created_on'] = today_string - nc.attrs['Created_by'] = myname - nc.attrs['Created_with'] = os.path.abspath(__file__) + " -- "+sha - - #delete unrelated attributes if they exist - del_attrs = ['source_code', 'SVN_url', 'hostname', 'history' - 'History_Log', 'Logname', 'Host', 'Version', - 'Compiler_Optimized'] - attr_list = nc.attrs - - for attr in del_attrs: - if attr in attr_list: - #print ("This attr should be deleted:", attr) - del(nc.attrs[attr]) - - - #for attr, value in attr_list.items(): - # print (attr + " = "+str(value)) - - - -class SinglePointCase (BaseCase): - """ - A case to encapsulate single point cases. - - ... - - Attributes - ---------- - plat : float - latitude - plon : float - longitude - site_name: str -- default = None - Site name - - Methods - ------- - create_tag - create a tag for single point which is the site name - or the "lon-lat" format if the site name does not exist. - - create_domain_at_point - Create domain file at a single point. - create_landuse_at_point: - Create landuse file at a single point. 
- create_surfdata_at_point: - Create surface dataset at a single point. - create_datmdomain_at_point: - Create DATM domain file at a single point. - """ - - def __init__(self, plat, plon,site_name, - create_domain, create_surfdata, create_landuse, create_datm, - overwrite_single_pft, dominant_pft, zero_nonveg_landunits, - uniform_snowpack, no_saturation_excess): - super().__init__(create_domain, create_surfdata, create_landuse, create_datm) - self.plat = plat - self.plon = plon - self.site_name = site_name - self.overwrite_single_pft = overwrite_single_pft - self.dominant_pft = dominant_pft - self.zero_nonveg_landunits = zero_nonveg_landunits - self.uniform_snowpack = uniform_snowpack - self.no_saturation_excess = no_saturation_excess - - def create_tag(self): - if self.site_name: - self.tag = self.site_name - else: - self.tag=str(self.plon)+'_'+str(self.plat) - - @staticmethod - def create_fileout_name( filename,tag): - - basename = os.path.basename(filename) - items = basename.split('_') - today = date.today() - today_string = today.strftime("%y%m%d") - new_string = items[0]+"_"+items[2]+"_"+items[3]+"_"+ items[4] \ - +"_"+items[5]+"_"+items[6]+"_"+tag+"_c"+today_string+".nc" - return new_string - - def create_domain_at_point (self): - print( "----------------------------------------------------------------------") - print ("Creating domain file at ", self.plon, self.plat) - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') - # extract gridcell closest to plon/plat - f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['nj','ni']) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdomain_in - - wfile=self.fdomain_out - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - - - def create_landuse_at_point (self): - print( "----------------------------------------------------------------------") - print ("Creating landuse file at ", self.plon, self.plat, ".") - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') - # extract gridcell closest to plon/plat - f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') - - # expand dimensions - f3 = f3.expand_dims(['lsmlat','lsmlon']) - # specify dimension order - #f3 = f3.transpose('time','lat','lon') - f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') - #f3['YEAR'] = f3['YEAR'].squeeze() - - # revert expand dimensions of YEAR - year = np.squeeze(np.asarray(f3['YEAR'])) - x = xr.DataArray(year, coords={'time':f3['time']}, dims='time', name='YEAR') - x.attrs['units']='unitless' - x.attrs['long_name']='Year of PFT data' - f3['YEAR'] = x - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fluse_in - - wfile = self.fluse_out - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (luse_out)'+self.fluse_out,".") - f2.close(); f3.close() - - def create_surfdata_at_point(self): - print( "----------------------------------------------------------------------") - print ("Creating surface dataset file at ", self.plon, self.plat, ".") - # create 1d coordinate variables to enable sel() method - filename = self.fsurf_in - f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') - # extract gridcell closest to plon/plat - f3 = 
f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['lsmlat','lsmlon']).copy(deep=True) - - # modify surface data properties - if self.overwrite_single_pft: - f3['PCT_NAT_PFT'][:,:,:] = 0 - f3['PCT_NAT_PFT'][:,:,self.dominant_pft] = 100 - if self.zero_nonveg_landunits: - f3['PCT_NATVEG'][:,:] = 100 - f3['PCT_CROP'][:,:] = 0 - f3['PCT_LAKE'][:,:] = 0. - f3['PCT_WETLAND'][:,:] = 0. - f3['PCT_URBAN'][:,:,] = 0. - f3['PCT_GLACIER'][:,:] = 0. - if self.uniform_snowpack: - f3['STD_ELEV'][:,:] = 20. - if self.no_saturation_excess: - f3['FMAX'][:,:] = 0. - - # specify dimension order - #f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') - f3 = f3.transpose(u'time', u'cft', u'lsmpft', u'natpft', u'nglcec', u'nglcecp1', u'nlevsoi', u'nlevurb', u'numrad', u'numurbl', 'lsmlat', 'lsmlon') - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fsurf_in - del(f3.attrs['History_Log']) - # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode='w') - print('Successfully created file (fsurf_out) :'+self.fsurf_out) - f2.close(); f3.close() - - def create_datmdomain_at_point(self): - print( "----------------------------------------------------------------------") - print("Creating DATM domain file at ", self.plon, self.plat, ".") - # create 1d coordinate variables to enable sel() method - filename = self.fdatmdomain_in - f2 = self.create_1d_coord(filename, 'xc','yc','ni','nj') - # extract gridcell closest to plon/plat - f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['nj','ni']) - wfile=self.fdatmdomain_out - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdatmdomain_in - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdatmdomain_out) :'+self.fdatmdomain_out) - f2.close(); f3.close() - - def extract_datm_at(self, file_in, file_out): - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(file_in, 'LONGXY','LATIXY','lon','lat') - # extract gridcell closest to plon/plat - f3 = f2.sel(lon=self.plon,lat=self.plat,method='nearest') - # expand dimensions - f3 = f3.expand_dims(['lat','lon']) - # specify dimension order - f3 = f3.transpose(u'scalar','time','lat','lon') - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = file_in - # mode 'w' overwrites file - f3.to_netcdf(path=file_out, mode='w') - print('Successfully created file :'+ file_out) - f2.close(); f3.close() - - def create_datm_at_point(self): - print( "----------------------------------------------------------------------") - print("Creating DATM files at ", self.plon, self.plat, ".") - #-- specify subdirectory names and filename prefixes - solrdir = 'Solar/' - precdir = 'Precip/' - tpqwldir = 'TPHWL/' - prectag = 'clmforc.GSWP3.c2011.0.5x0.5.Prec.' - solrtag = 'clmforc.GSWP3.c2011.0.5x0.5.Solr.' - tpqwtag = 'clmforc.GSWP3.c2011.0.5x0.5.TPQWL.' 
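
(For reference: the monthly file names that the loop just below assembles from
these prefixes can be sketched standalone. This is a minimal, illustrative
Python sketch; the helper name and the 1901-1902 year range are invented here,
and only the prefix strings come from the code.)

    prectag = "clmforc.GSWP3.c2011.0.5x0.5.Prec."
    solrtag = "clmforc.GSWP3.c2011.0.5x0.5.Solr."
    tpqwtag = "clmforc.GSWP3.c2011.0.5x0.5.TPQWL."

    def datm_infile_names(datm_syr, datm_eyr):
        # One file per stream per month, tagged "YYYY-MM" just as in the
        # loop below (years are not zero-padded there either).
        names = []
        for year in range(datm_syr, datm_eyr + 1):
            for month in range(1, 13):
                dtag = str(year) + "-" + str(month).zfill(2)
                names += [tag + dtag + ".nc" for tag in (solrtag, prectag, tpqwtag)]
        return names

    # Two years of forcing: 2 years x 12 months x 3 streams = 72 files.
    assert len(datm_infile_names(1901, 1902)) == 72
    assert datm_infile_names(1901, 1901)[0] == "clmforc.GSWP3.c2011.0.5x0.5.Solr.1901-01.nc"
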
- - #-- create data files - infile=[] - outfile=[] - for y in range(self.datm_syr,self.datm_eyr+1): - ystr=str(y) - for m in range(1,13): - mstr=str(m) - if m < 10: - mstr='0'+mstr - - dtag=ystr+'-'+mstr - - fsolar=self.dir_input_datm+solrdir+solrtag+dtag+'.nc' - fsolar2=self.dir_output_datm+solrtag+self.tag+'.'+dtag+'.nc' - fprecip=self.dir_input_datm+precdir+prectag+dtag+'.nc' - fprecip2=self.dir_output_datm+prectag+self.tag+'.'+dtag+'.nc' - ftpqw=self.dir_input_datm+tpqwldir+tpqwtag+dtag+'.nc' - ftpqw2=self.dir_output_datm+tpqwtag+self.tag+'.'+dtag+'.nc' - - infile+=[fsolar,fprecip,ftpqw] - outfile+=[fsolar2,fprecip2,ftpqw2] - - nm=len(infile) - for n in range(nm): - print(outfile[n]) - file_in = infile[n] - file_out = outfile[n] - self.extract_datm_at(file_in, file_out) - - - print('All DATM files are created in: '+self.dir_output_datm) - -class RegionalCase (BaseCase): - """ - A case to encapsulate regional cases. - """ - - def __init__(self, lat1, lat2, lon1, lon2, reg_name, - create_domain, create_surfdata, create_landuse, create_datm): - super().__init__(create_domain, create_surfdata, create_landuse, create_datm) - self.lat1 = lat1 - self.lat2 = lat2 - self.lon1 = lon1 - self.lon2 = lon2 - self.reg_name = reg_name - - def create_tag(self): - if self.reg_name: - self.tag = self.reg_name - else: - self.tag=str(self.lon1)+'-'+str(self.lon2)+'_'+str(self.lat1)+'-'+str(self.lat2) - - def create_domain_at_reg (self): - #print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating domain file at region:", self.tag) - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') - lat = f2['lat'] - lon = f2['lon'] - # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(nj=yind,ni=xind) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdomain_in - - wfile=self.fdomain_out - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - - - def create_surfdata_at_reg(self): - #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating surface dataset file at region:", self.tag) - # create 1d coordinate variables to enable sel() method - filename = self.fsurf_in - f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') - lat = f2['lat'] - lon = f2['lon'] - # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(lsmlat=yind,lsmlon=xind) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fsurf_in - - # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode='w') - print('created file (fsurf_out)'+self.fsurf_out) - #f1.close(); - f2.close(); f3.close() - - - def create_landuse_at_reg (self): - #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating surface dataset file at region:",self.tag) - # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') - lat = f2['lat'] - lon = f2['lon'] - # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= 
self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(lsmlat=yind,lsmlon=xind) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fluse_in - - wfile=self.fluse_out - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - - -def setup_logging(log_file, log_level): - """ - Setup logging to log to console and log file. - """ - - root_logger = logging.getLogger() - root_logger.setLevel(log_level) - - # setup log file - one_mb = 1000000 - handler = logging.handlers.RotatingFileHandler(log_file, maxBytes=one_mb , backupCount=10) - - fmt = logging.Formatter( - '%(asctime)s %(name)-12s %(levelname)-8s %(message)s', - datefmt='%y-%m-%d %H:%M:%S') - - handler.setFormatter(fmt) - root_logger.addHandler(handler) - - # setup logging to console - stream_handler = logging.StreamHandler(sys.stdout) - stream_handler.setFormatter(fmt) - root_logger.addHandler(stream_handler) - - # redirect stdout/err to log file - StreamToLogger.setup_stdout() - StreamToLogger.setup_stderr() - - - -class StreamToLogger(object): - """ - Custom class to log all stdout and stderr streams. - modified from: - https://www.electricmonk.nl/log/2011/08/14/redirect-stdout-and-stderr-to-a-logger-in-python/ - """ - def __init__(self, stream, logger, log_level=logging.INFO, - also_log_to_stream=False): - self.logger = logger - self.stream = stream - self.log_level = log_level - self.linebuf = '' - self.also_log_to_stream = also_log_to_stream - - @classmethod - def setup_stdout(cls, also_log_to_stream=True): - """ - Setup logger for stdout - """ - stdout_logger = logging.getLogger('STDOUT') - sl = StreamToLogger(sys.stdout, stdout_logger, logging.INFO, also_log_to_stream) - sys.stdout = sl - - @classmethod - def setup_stderr(cls, also_log_to_stream=True): - """ - Setup logger for stdout - """ - stderr_logger = logging.getLogger('STDERR') - sl = StreamToLogger(sys.stderr, stderr_logger, logging.ERROR, also_log_to_stream) - sys.stderr = sl - - def write(self, buf): - temp_linebuf = self.linebuf + buf - self.linebuf = '' - for line in temp_linebuf.splitlines(True): - if line[-1] == '\n': - self.logger.log(self.log_level, line.rstrip()) - else: - self.linebuf += line - - def flush(self): - if self.linebuf != '': - self.logger.log(self.log_level, self.linebuf.rstrip()) - self.linebuf = '' - - - - -def main (): - - args = get_parser().parse_args() - - # --------------------------------- # - - today = date.today() - today_string = today.strftime("%Y%m%d") - - pwd = os.getcwd() - - log_file = os.path.join(pwd, today_string+'.log') - - log_level = logging.DEBUG - setup_logging(log_file, log_level) - log = logging.getLogger(__name__) - - print("User = "+myname) - print("Current directory = "+pwd) - - # --------------------------------- # - - if (args.run_type == "point"): - print( "----------------------------------------------------------------------------") - print( "This script extracts a single point from the global CTSM inputdata datasets." 
) - - #-- Specify point to extract - plon = args.plon - plat = args.plat - - #-- Create regional CLM domain file - create_domain = args.create_domain - #-- Create CLM surface data file - create_surfdata = args.create_surfdata - #-- Create CLM surface data file - create_landuse = args.create_landuse - #-- Create single point DATM atmospheric forcing data - create_datm = args.create_datm - datm_syr = args.datm_syr - datm_eyr = args.datm_eyr - - crop_flag = args.crop_flag - - site_name = args.site_name - - #-- Modify landunit structure - overwrite_single_pft = args.overwrite_single_pft - dominant_pft = args.dom_pft - zero_nonveg_landunits= args.zero_nonveg - uniform_snowpack = args.uni_snow - no_saturation_excess = args.no_saturation_excess - - - #-- Create SinglePoint Object - single_point = SinglePointCase(plat, plon,site_name, - create_domain, create_surfdata, create_landuse, create_datm, - overwrite_single_pft, dominant_pft, zero_nonveg_landunits, uniform_snowpack, - no_saturation_excess) - single_point.create_tag() - - - print (single_point) - - if crop_flag: - num_pft = "78" - else: - num_pft = "16" - - print('crop_flag = '+ crop_flag.__str__()+ ' => num_pft ='+ num_pft) - - #-- Set input and output filenames - #-- Specify input and output directories - dir_output = args.out_dir - if ( not os.path.isdir( dir_output ) ): - os.mkdir( dir_output ) - - dir_inputdata='/glade/p/cesmdata/cseg/inputdata/' - dir_clm_forcedata='/glade/p/cgd/tss/CTSM_datm_forcing_data/' - dir_input_datm=os.path.join(dir_clm_forcedata,'atm_forcing.datm7.GSWP3.0.5d.v1.c170516/') - dir_output_datm=os.path.join(dir_output , 'datmdata/') - if ( not os.path.isdir( dir_output_datm ) ): - os.mkdir( dir_output_datm ) - - print ("dir_input_datm : ", dir_input_datm) # - print ("dir_output_datm : ", dir_output_datm) # - - - #-- Set time stamp - today = date.today() - timetag = today.strftime("%y%m%d") - - #-- Specify land domain file --------------------------------- - fdomain_in = os.path.join(dir_inputdata,'share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc') - fdomain_out = dir_output + single_point.add_tag_to_filename( fdomain_in, single_point.tag ) - single_point.fdomain_in = fdomain_in - single_point.fdomain_out = fdomain_out - print ("fdomain_in :",fdomain_in) # - print ("fdomain_out :",fdomain_out) # - - #-- Specify surface data file -------------------------------- - if crop_flag: - fsurf_in = os.path.join (dir_inputdata, 'lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc') - else: - fsurf_in = os.path.join (dir_inputdata, 'lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc') - - #fsurf_out = dir_output + single_point.add_tag_to_filename(fsurf_in, single_point.tag) # remove res from filename for singlept - fsurf_out = dir_output + single_point.create_fileout_name(fsurf_in, single_point.tag) - single_point.fsurf_in = fsurf_in - single_point.fsurf_out = fsurf_out - print ("fsurf_in :",fsurf_in) # - print ("fsurf_out :",fsurf_out) # - - #-- Specify landuse file ------------------------------------- - if crop_flag: - fluse_in = os.path.join (dir_inputdata,'lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc') - else: - fluse_in = os.path.join (dir_inputdata,'lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc') - #fluse_out = dir_output + single_point.add_tag_to_filename( fluse_in, single_point.tag 
) # remove resolution from filename for singlept cases - fluse_out = dir_output + single_point.create_fileout_name(fluse_in, single_point.tag) - single_point.fluse_in = fluse_in - single_point.fluse_out = fluse_out - print ("fluse_in :", fluse_in) # - print ("fluse_out :", fluse_out) # - - #-- Specify datm domain file --------------------------------- - fdatmdomain_in = os.path.join (dir_clm_forcedata,'atm_forcing.datm7.GSWP3.0.5d.v1.c170516/domain.lnd.360x720_gswp3.0v1.c170606.nc') - fdatmdomain_out = dir_output_datm+single_point.add_tag_to_filename( fdatmdomain_in, single_point.tag ) - single_point.fdatmdomain_in = fdatmdomain_in - single_point.fdatmdomain_out = fdatmdomain_out - print ("fdatmdomain_in : ", fdatmdomain_in) # - print ("fdatmdomain out : ", fdatmdomain_out) # - - #-- Create CTSM domain file - if create_domain: - single_point.create_domain_at_point() - - #-- Create CTSM surface data file - if create_surfdata: - single_point.create_surfdata_at_point() - - #-- Create CTSM transient landuse data file - if create_landuse: - single_point.create_landuse_at_point() - - #-- Create single point atmospheric forcing data - if create_datm: - single_point.create_datmdomain_at_point() - single_point.datm_syr =datm_syr - single_point.datm_eyr =datm_eyr - single_point.dir_input_datm = dir_input_datm - single_point.dir_output_datm = dir_output_datm - single_point.create_datm_at_point() - - print( "Successfully ran script for single point." ) - exit() - - elif (args.run_type == "reg"): - print ("Running the script for the region") - #-- Specify region to extract - lat1 = args.lat1 - lat2 = args.lat2 - - lon1 = args.lon1 - lon2 = args.lon2 - - #-- Create regional CLM domain file - create_domain = args.create_domain - #-- Create CLM surface data file - create_surfdata = args.create_surfdata - #-- Create CLM surface data file - create_landuse = args.create_landuse - #-- Create DATM atmospheric forcing data - create_datm = args.create_datm - - crop_flag = args.crop_flag - - reg_name = args.reg_name - - region = RegionalCase(lat1, lat2, lon1, lon2, reg_name, create_domain, create_surfdata, create_landuse, create_datm) - - print (region) - - if crop_flag: - num_pft = "78" - else: - num_pft = "16" - - - print(' crop_flag = '+ crop_flag.__str__()+ ' num_pft ='+ num_pft) - - - region.create_tag() - - #-- Set input and output filenames - #-- Specify input and output directories - dir_output='/glade/scratch/'+myname+'/region/' - if ( not os.path.isdir( dir_output ) ): - os.mkdir( dir_output ) - - dir_inputdata='/glade/p/cesmdata/cseg/inputdata/' - dir_clm_forcedata='/glade/p/cgd/tss/CTSM_datm_forcing_data/' - - #-- Set time stamp - command='date "+%y%m%d"' - x2=subprocess.Popen(command,stdout=subprocess.PIPE,shell='True') - x=x2.communicate() - timetag = x[0].strip() - print (timetag) - - #-- Specify land domain file --------------------------------- - fdomain_in = dir_inputdata+'share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc' - fdomain_out = dir_output + 'domain.lnd.fv1.9x2.5_gx1v7.'+region.tag+'_170518.nc' - #SinglePointCase.set_fdomain (fdomain) - region.fdomain_in = fdomain_in - region.fdomain_out = fdomain_out - - #-- Specify surface data file -------------------------------- - fsurf_in = dir_inputdata+'lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc' - fsurf_out = dir_output + 'surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_'+region.tag+'_c170824.nc' - region.fsurf_in = fsurf_in - region.fsurf_out = fsurf_out - - #-- Specify landuse file ------------------------------------- 
-        fluse_in = dir_inputdata+'lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc'
-        fluse_out = dir_output + 'landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_'+region.tag+'.c170824.nc'
-        region.fluse_in = fluse_in
-        region.fluse_out = fluse_out
-
-        #-- Create CTSM domain file
-        if create_domain:
-            region.create_domain_at_reg()
-
-        #-- Create CTSM surface data file
-        if create_surfdata:
-            region.create_surfdata_at_reg()
-
-        #-- Create CTSM transient landuse data file
-        if create_landuse:
-            region.create_landuse_at_reg()
-        print( "Successfully ran script for a regional case." )
-
-    else :
-        # print help when no option is chosen
-        get_parser().print_help()
-
-if __name__ == "__main__":
-    main()

From cb317e9d14667a2901ea7be399ef099bf660dc92 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Tue, 7 Sep 2021 13:03:47 -0600
Subject: [PATCH 010/223] some small changes...

---
 python/ctsm/subset_data.py | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py
index f02fab3211..c4795cb817 100755
--- a/python/ctsm/subset_data.py
+++ b/python/ctsm/subset_data.py
@@ -23,7 +23,8 @@
 After creating a case using a global compset, run preview_namelist.
 From the resulting lnd_in file in the run directory, find the name
-of the domain file, and the surface data file.
+of the domain file, and the surface data file.
+
 From the datm streams files (e.g. datm.streams.txt.CLMGSWP3v1.Precip)
 find the name of the datm forcing data domain file and forcing files.
 Use these file names as the sources for the single point/regional
@@ -55,18 +56,20 @@
 and land ice models turned off (i.e. the compset should use stub
 models SROF and SGLC)
 
+By default, the script extracts only the surface dataset; use the
+appropriate flags to extract the other files as well.
 -------------------------------------------------------------------
 To run the script for a single point:
- ./subset_data.py point
+ ./subset_data.py point --help
 
 To run the script for a region:
- ./subset_data.py reg
+ ./subset_data.py reg --help
 
 To remove NPL from your environment on Cheyenne/Casper:
 deactivate
 -------------------------------------------------------------------
-
 """
+
 # TODO
 # Automatic downloading of missing files if they are missing
 # default 78 pft vs 16 pft
@@ -86,13 +89,14 @@
 from datetime import date
 from getpass import getuser
+from logging.handlers import RotatingFileHandler
 from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
 
 # -- add python/ctsm to path
 _CTSM_PYTHON = os.path.join(
     os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python"
     )
-# print (_CTSM_PYTHON)
+
 sys.path.insert(1, _CTSM_PYTHON)


From 9edf9b007f206b5b788bcb309f21c8918d7d56ae Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Tue, 7 Sep 2021 13:04:44 -0600
Subject: [PATCH 011/223] running this through formatter...
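
(Aside: every boolean command-line flag reformatted in the patch below relies
on the str2bool converter combined with nargs="?". A self-contained sketch of
that idiom, reusing --create_domain as the illustrative flag, is shown here;
the asserts only demonstrate the three ways the flag can be supplied.)

    import argparse

    def str2bool(v):
        # Mirrors the str2bool helper defined in subset_data.py.
        if isinstance(v, bool):
            return v
        if v.lower() in ("yes", "true", "t", "y", "1"):
            return True
        if v.lower() in ("no", "false", "f", "n", "0"):
            return False
        raise argparse.ArgumentTypeError("Boolean value expected. [true or false] or [y or n]")

    parser = argparse.ArgumentParser()
    # Bare use of the flag yields const=True, omitting it yields the default,
    # and an explicit string such as "no" goes through str2bool.
    parser.add_argument("--create_domain", type=str2bool, nargs="?", const=True, default=False)

    assert parser.parse_args([]).create_domain is False
    assert parser.parse_args(["--create_domain"]).create_domain is True
    assert parser.parse_args(["--create_domain", "no"]).create_domain is False
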
--- python/ctsm/subset_data.py | 926 +++++++++++++++++++++---------------- 1 file changed, 534 insertions(+), 392 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index c4795cb817..0f1154e263 100755 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -94,8 +94,8 @@ # -- add python/ctsm to path _CTSM_PYTHON = os.path.join( - os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python" - ) + os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python" +) sys.path.insert(1, _CTSM_PYTHON) @@ -106,238 +106,298 @@ myname = getuser() -def get_parser(): - """Get parser object for this script.""" - #parser = ArgumentParser(description=__doc__, - # formatter_class=ArgumentDefaultsHelpFormatter) - parser = ArgumentParser(description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - - parser.print_usage = parser.print_help - subparsers = parser.add_subparsers( - help='Two possible ways to run this sript, either:', - dest ='run_type') - pt_parser = subparsers.add_parser('point', - help = 'Run script for a single point.') - rg_parser = subparsers.add_parser('reg', - help = 'Run script for a region.') - - - pt_parser.add_argument('--lat', - help='Single point latitude. [default: %(default)s]', - action="store", - dest="plat", - required=False, - type = plat_type, - default=42.5) - pt_parser.add_argument('--lon', - help='Single point longitude. [default: %(default)s]', - action="store", - dest="plon", - required=False, - type = plon_type, - default= 287.8 ) - pt_parser.add_argument('--site', - help='Site name or tag. [default: %(default)s]', - action="store", - dest="site_name", - required = False, - type = str, - default = '') - pt_parser.add_argument('--create_domain', - help='Flag for creating CLM domain file at single point. [default: %(default)s]', - action="store", - dest="create_domain", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - pt_parser.add_argument('--create_surface', - help='Flag for creating surface data file at single point. [default: %(default)s]', - action="store", - dest="create_surfdata", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = True) - pt_parser.add_argument('--create_landuse', - help='Flag for creating landuse data file at single point. [default: %(default)s]', - action="store", - dest="create_landuse", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - pt_parser.add_argument('--create_datm', - help='Flag for creating DATM forcing data at single point. [default: %(default)s]', - action="store", - dest="create_datm", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - pt_parser.add_argument('--datm_syr', - help='Start year for creating DATM forcing at single point. [default: %(default)s]', - action="store", - dest="datm_syr", - required = False, - type = int, - default = 1901) - pt_parser.add_argument('--datm_eyr', - help='End year for creating DATM forcing at single point. [default: %(default)s]', - action="store", - dest="datm_eyr", - required = False, - type = int, - default = 2014) - pt_parser.add_argument('--crop', - help='Create datasets using the extensive list of prognostic crop types. [default: %(default)s]', - action="store_true", - dest="crop_flag", - default=False) - pt_parser.add_argument('--dompft', - help='Dominant PFT type . 
[default: %(default)s] ', - action="store", - dest="dom_pft", - type =int, - default=7) - pt_parser.add_argument('--no-unisnow', - help='Turn off the flag for create uniform snowpack. [default: %(default)s]', - action="store_false", - dest="uni_snow", - default=True) - pt_parser.add_argument('--no-overwrite_single_pft', - help='Turn off the flag for making the whole grid 100%% single PFT. [default: %(default)s]', - action="store_false", - dest="overwrite_single_pft", - default=True) - pt_parser.add_argument('--zero_nonveg', - help='Set all non-vegetation landunits to zero. [default: %(default)s]', - action="store", - dest="zero_nonveg", - type =bool, - default=True) - pt_parser.add_argument('--no_saturation_excess', - help='Turn off the flag for saturation excess. [default: %(default)s]', - action="store", - dest="no_saturation_excess", - type =bool, - default=True) - pt_parser.add_argument('--outdir', - help='Output directory. [default: %(default)s]', - action="store", - dest="out_dir", - type =str, - default="/glade/scratch/"+myname+"/single_point/") - - rg_parser.add_argument('--lat1', - help='Region start latitude. [default: %(default)s]', - action="store", - dest="lat1", - required=False, - type = plat_type, - default=-40) - rg_parser.add_argument('--lat2', - help='Region end latitude. [default: %(default)s]', - action="store", - dest="lat2", - required=False, - type = plat_type, - default=15) - rg_parser.add_argument('--lon1', - help='Region start longitude. [default: %(default)s]', - action="store", - dest="lon1", - required=False, - type = plon_type, - default= 275. ) - rg_parser.add_argument('--lon2', - help='Region end longitude. [default: %(default)s]', - action="store", - dest="lon2", - required=False, - type = plon_type, - default= 330. ) - rg_parser.add_argument('--reg', - help='Region name or tag. [default: %(default)s]', - action="store", - dest="reg_name", - required = False, - type = str, - default = '') - rg_parser.add_argument('--create_domain', - help='Flag for creating CLM domain file for a region. [default: %(default)s]', - action="store", - dest="create_domain", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - rg_parser.add_argument('--create_surface', - help='Flag for creating surface data file for a region. [default: %(default)s]', - action="store", - dest="create_surfdata", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = True) - rg_parser.add_argument('--create_landuse', - help='Flag for creating landuse data file for a region. [default: %(default)s]', - action="store", - dest="create_landuse", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - rg_parser.add_argument('--create_datm', - help='Flag for creating DATM forcing data for a region. [default: %(default)s]', - action="store", - dest="create_datm", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - rg_parser.add_argument('--datm_syr', - help='Start year for creating DATM forcing for a region. [default: %(default)s]', - action="store", - dest="datm_syr", - required = False, - type = int, - default = 1901) - rg_parser.add_argument('--datm_eyr', - help='End year for creating DATM forcing for a region. [default: %(default)s]', - action="store", - dest="datm_eyr", - required = False, - type = int, - default = 2014) - rg_parser.add_argument('--crop', - help='Create datasets using the extensive list of prognostic crop types. 
[default: %(default)s]', - action="store_true", - dest="crop_flag", - default=False) - rg_parser.add_argument('--dompft', - help='Dominant PFT type . [default: %(default)s] ', - action="store", - dest="dom_pft", - type =int, - default=7) - rg_parser.add_argument('--outdir', - help='Output directory. [default: %(default)s]', - action="store", - dest="out_dir", - type =str, - default="/glade/scratch/"+myname+"/regional/") - - return parser + +def get_parser(): + """Get parser object for this script.""" + # parser = ArgumentParser(description=__doc__, + # formatter_class=ArgumentDefaultsHelpFormatter) + parser = ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + parser.print_usage = parser.print_help + subparsers = parser.add_subparsers( + help="Two possible ways to run this sript, either:", dest="run_type" + ) + pt_parser = subparsers.add_parser("point", help="Run script for a single point.") + rg_parser = subparsers.add_parser("reg", help="Run script for a region.") + + pt_parser.add_argument( + "--lat", + help="Single point latitude. [default: %(default)s]", + action="store", + dest="plat", + required=False, + type=plat_type, + default=42.5, + ) + pt_parser.add_argument( + "--lon", + help="Single point longitude. [default: %(default)s]", + action="store", + dest="plon", + required=False, + type=plon_type, + default=287.8, + ) + pt_parser.add_argument( + "--site", + help="Site name or tag. [default: %(default)s]", + action="store", + dest="site_name", + required=False, + type=str, + default="", + ) + pt_parser.add_argument( + "--create_domain", + help="Flag for creating CLM domain file at single point. [default: %(default)s]", + action="store", + dest="create_domain", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + pt_parser.add_argument( + "--create_surface", + help="Flag for creating surface data file at single point. [default: %(default)s]", + action="store", + dest="create_surfdata", + type=str2bool, + nargs="?", + const=True, + required=False, + default=True, + ) + pt_parser.add_argument( + "--create_landuse", + help="Flag for creating landuse data file at single point. [default: %(default)s]", + action="store", + dest="create_landuse", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + pt_parser.add_argument( + "--create_datm", + help="Flag for creating DATM forcing data at single point. [default: %(default)s]", + action="store", + dest="create_datm", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + pt_parser.add_argument( + "--datm_syr", + help="Start year for creating DATM forcing at single point. [default: %(default)s]", + action="store", + dest="datm_syr", + required=False, + type=int, + default=1901, + ) + pt_parser.add_argument( + "--datm_eyr", + help="End year for creating DATM forcing at single point. [default: %(default)s]", + action="store", + dest="datm_eyr", + required=False, + type=int, + default=2014, + ) + pt_parser.add_argument( + "--crop", + help="Create datasets using the extensive list of prognostic crop types. [default: %(default)s]", + action="store_true", + dest="crop_flag", + default=False, + ) + pt_parser.add_argument( + "--dompft", + help="Dominant PFT type . [default: %(default)s] ", + action="store", + dest="dom_pft", + type=int, + default=7, + ) + pt_parser.add_argument( + "--no-unisnow", + help="Turn off the flag for create uniform snowpack. 
[default: %(default)s]", + action="store_false", + dest="uni_snow", + default=True, + ) + pt_parser.add_argument( + "--no-overwrite_single_pft", + help="Turn off the flag for making the whole grid 100%% single PFT. [default: %(default)s]", + action="store_false", + dest="overwrite_single_pft", + default=True, + ) + pt_parser.add_argument( + "--zero_nonveg", + help="Set all non-vegetation landunits to zero. [default: %(default)s]", + action="store", + dest="zero_nonveg", + type=bool, + default=True, + ) + pt_parser.add_argument( + "--no_saturation_excess", + help="Turn off the flag for saturation excess. [default: %(default)s]", + action="store", + dest="no_saturation_excess", + type=bool, + default=True, + ) + pt_parser.add_argument( + "--outdir", + help="Output directory. [default: %(default)s]", + action="store", + dest="out_dir", + type=str, + default="/glade/scratch/" + myname + "/single_point/", + ) + + rg_parser.add_argument( + "--lat1", + help="Region start latitude. [default: %(default)s]", + action="store", + dest="lat1", + required=False, + type=plat_type, + default=-40, + ) + rg_parser.add_argument( + "--lat2", + help="Region end latitude. [default: %(default)s]", + action="store", + dest="lat2", + required=False, + type=plat_type, + default=15, + ) + rg_parser.add_argument( + "--lon1", + help="Region start longitude. [default: %(default)s]", + action="store", + dest="lon1", + required=False, + type=plon_type, + default=275.0, + ) + rg_parser.add_argument( + "--lon2", + help="Region end longitude. [default: %(default)s]", + action="store", + dest="lon2", + required=False, + type=plon_type, + default=330.0, + ) + rg_parser.add_argument( + "--reg", + help="Region name or tag. [default: %(default)s]", + action="store", + dest="reg_name", + required=False, + type=str, + default="", + ) + rg_parser.add_argument( + "--create_domain", + help="Flag for creating CLM domain file for a region. [default: %(default)s]", + action="store", + dest="create_domain", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + rg_parser.add_argument( + "--create_surface", + help="Flag for creating surface data file for a region. [default: %(default)s]", + action="store", + dest="create_surfdata", + type=str2bool, + nargs="?", + const=True, + required=False, + default=True, + ) + rg_parser.add_argument( + "--create_landuse", + help="Flag for creating landuse data file for a region. [default: %(default)s]", + action="store", + dest="create_landuse", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + rg_parser.add_argument( + "--create_datm", + help="Flag for creating DATM forcing data for a region. [default: %(default)s]", + action="store", + dest="create_datm", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + rg_parser.add_argument( + "--datm_syr", + help="Start year for creating DATM forcing for a region. [default: %(default)s]", + action="store", + dest="datm_syr", + required=False, + type=int, + default=1901, + ) + rg_parser.add_argument( + "--datm_eyr", + help="End year for creating DATM forcing for a region. [default: %(default)s]", + action="store", + dest="datm_eyr", + required=False, + type=int, + default=2014, + ) + rg_parser.add_argument( + "--crop", + help="Create datasets using the extensive list of prognostic crop types. [default: %(default)s]", + action="store_true", + dest="crop_flag", + default=False, + ) + rg_parser.add_argument( + "--dompft", + help="Dominant PFT type . 
[default: %(default)s] ", + action="store", + dest="dom_pft", + type=int, + default=7, + ) + rg_parser.add_argument( + "--outdir", + help="Output directory. [default: %(default)s]", + action="store", + dest="out_dir", + type=str, + default="/glade/scratch/" + myname + "/regional/", + ) + + return parser + def str2bool(v): """ @@ -356,13 +416,15 @@ def str2bool(v): bool: Boolean value corresponding to the input. """ if isinstance(v, bool): - return v - if v.lower() in ('yes', 'true', 't', 'y', '1'): + return v + if v.lower() in ("yes", "true", "t", "y", "1"): return True - elif v.lower() in ('no', 'false', 'f', 'n', '0'): + elif v.lower() in ("no", "false", "f", "n", "0"): return False else: - raise argparse.ArgumentTypeError('Boolean value expected. [true or false] or [y or n]') + raise argparse.ArgumentTypeError( + "Boolean value expected. [true or false] or [y or n]" + ) def plat_type(x): @@ -373,31 +435,40 @@ def plat_type(x): """ x = float(x) if (x < -90) or (x > 90): - raise argparse.ArgumentTypeError("ERROR: Latitude should be between -90 and 90.") + raise argparse.ArgumentTypeError( + "ERROR: Latitude should be between -90 and 90." + ) return x def plon_type(x): """ Function to define lon type for the parser and - convert negative longitudes and + convert negative longitudes and raise error if lon is not between -180 and 360. """ x = float(x) if (-180 < x) and (x < 0): - print ("lon is :", lon) - x= x%360 - print ("after modulo lon is :", lon) + print("lon is :", lon) + x = x % 360 + print("after modulo lon is :", lon) if (x < 0) or (x > 360): - raise argparse.ArgumentTypeError("ERROR: Latitude of single point should be between 0 and 360 or -180 and 180.") + raise argparse.ArgumentTypeError( + "ERROR: Latitude of single point should be between 0 and 360 or -180 and 180." + ) return x + def get_git_sha(): """ Returns Git short SHA for the currect directory. """ try: - sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode() + sha = ( + subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) + .strip() + .decode() + ) except subprocess.CalledProcessError: sha = "NOT-A-GIT-REPOSITORY" return sha @@ -413,11 +484,14 @@ def setup_logging(log_file, log_level): # setup log file one_mb = 1000000 - handler = logging.handlers.RotatingFileHandler(log_file, maxBytes=one_mb , backupCount=10) + handler = logging.handlers.RotatingFileHandler( + log_file, maxBytes=one_mb, backupCount=10 + ) fmt = logging.Formatter( - '%(asctime)s %(name)-12s %(levelname)-8s %(message)s', - datefmt='%y-%m-%d %H:%M:%S') + "%(asctime)s %(name)-12s %(levelname)-8s %(message)s", + datefmt="%y-%m-%d %H:%M:%S", + ) handler.setFormatter(fmt) root_logger.addHandler(handler) @@ -432,19 +506,20 @@ def setup_logging(log_file, log_level): StreamToLogger.setup_stderr() - class StreamToLogger(object): """ Custom class to log all stdout and stderr streams. 
modified from: https://www.electricmonk.nl/log/2011/08/14/redirect-stdout-and-stderr-to-a-logger-in-python/ """ - def __init__(self, stream, logger, log_level=logging.INFO, - also_log_to_stream=False): + + def __init__( + self, stream, logger, log_level=logging.INFO, also_log_to_stream=False + ): self.logger = logger self.stream = stream self.log_level = log_level - self.linebuf = '' + self.linebuf = "" self.also_log_to_stream = also_log_to_stream @classmethod @@ -452,7 +527,7 @@ def setup_stdout(cls, also_log_to_stream=True): """ Setup logger for stdout """ - stdout_logger = logging.getLogger('STDOUT') + stdout_logger = logging.getLogger("STDOUT") sl = StreamToLogger(sys.stdout, stdout_logger, logging.INFO, also_log_to_stream) sys.stdout = sl @@ -461,28 +536,28 @@ def setup_stderr(cls, also_log_to_stream=True): """ Setup logger for stdout """ - stderr_logger = logging.getLogger('STDERR') - sl = StreamToLogger(sys.stderr, stderr_logger, logging.ERROR, also_log_to_stream) + stderr_logger = logging.getLogger("STDERR") + sl = StreamToLogger( + sys.stderr, stderr_logger, logging.ERROR, also_log_to_stream + ) sys.stderr = sl def write(self, buf): - temp_linebuf = self.linebuf + buf - self.linebuf = '' + temp_linebuf = self.linebuf + buf + self.linebuf = "" for line in temp_linebuf.splitlines(True): - if line[-1] == '\n': + if line[-1] == "\n": self.logger.log(self.log_level, line.rstrip()) else: self.linebuf += line def flush(self): - if self.linebuf != '': - self.logger.log(self.log_level, self.linebuf.rstrip()) - self.linebuf = '' - + if self.linebuf != "": + self.logger.log(self.log_level, self.linebuf.rstrip()) + self.linebuf = "" - -def main (): +def main(): args = get_parser().parse_args() @@ -493,33 +568,37 @@ def main (): pwd = os.getcwd() - log_file = os.path.join(pwd, today_string+'.log') + log_file = os.path.join(pwd, today_string + ".log") - log_level = logging.DEBUG + log_level = logging.DEBUG setup_logging(log_file, log_level) log = logging.getLogger(__name__) - print("User = "+myname) - print("Current directory = "+pwd) + print("User = " + myname) + print("Current directory = " + pwd) # --------------------------------- # - if (args.run_type == "point"): - print( "----------------------------------------------------------------------------") - print( "This script extracts a single point from the global CTSM inputdata datasets." ) + if args.run_type == "point": + print( + "----------------------------------------------------------------------------" + ) + print( + "This script extracts a single point from the global CTSM inputdata datasets." 
+ ) - #-- Specify point to extract + # -- Specify point to extract plon = args.plon plat = args.plat - #-- Create regional CLM domain file - create_domain = args.create_domain - #-- Create CLM surface data file + # -- Create regional CLM domain file + create_domain = args.create_domain + # -- Create CLM surface data file create_surfdata = args.create_surfdata - #-- Create CLM surface data file - create_landuse = args.create_landuse - #-- Create single point DATM atmospheric forcing data - create_datm = args.create_datm + # -- Create CLM surface data file + create_landuse = args.create_landuse + # -- Create single point DATM atmospheric forcing data + create_datm = args.create_datm datm_syr = args.datm_syr datm_eyr = args.datm_eyr @@ -527,206 +606,269 @@ def main (): site_name = args.site_name - #-- Modify landunit structure + # -- Modify landunit structure overwrite_single_pft = args.overwrite_single_pft - dominant_pft = args.dom_pft - zero_nonveg_landunits= args.zero_nonveg - uniform_snowpack = args.uni_snow + dominant_pft = args.dom_pft + zero_nonveg_landunits = args.zero_nonveg + uniform_snowpack = args.uni_snow no_saturation_excess = args.no_saturation_excess - - #-- Create SinglePoint Object - single_point = SinglePointCase(plat, plon,site_name, - create_domain, create_surfdata, create_landuse, create_datm, - overwrite_single_pft, dominant_pft, zero_nonveg_landunits, uniform_snowpack, - no_saturation_excess) + # -- Create SinglePoint Object + single_point = SinglePointCase( + plat, + plon, + site_name, + create_domain, + create_surfdata, + create_landuse, + create_datm, + overwrite_single_pft, + dominant_pft, + zero_nonveg_landunits, + uniform_snowpack, + no_saturation_excess, + ) single_point.create_tag() - - print (single_point) - #output_to_logger (single_point) + print(single_point) + # output_to_logger (single_point) if crop_flag: - num_pft = "78" + num_pft = "78" else: - num_pft = "16" + num_pft = "16" - print('crop_flag = '+ crop_flag.__str__()+ ' => num_pft ='+ num_pft) + print("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) - #-- Set input and output filenames - #-- Specify input and output directories + # -- Set input and output filenames + # -- Specify input and output directories dir_output = args.out_dir - if ( not os.path.isdir( dir_output ) ): - os.mkdir( dir_output ) - - dir_inputdata='/glade/p/cesmdata/cseg/inputdata/' - dir_clm_forcedata='/glade/p/cgd/tss/CTSM_datm_forcing_data/' - dir_input_datm=os.path.join(dir_clm_forcedata,'atm_forcing.datm7.GSWP3.0.5d.v1.c170516/') - dir_output_datm=os.path.join(dir_output , 'datmdata/') - if ( not os.path.isdir( dir_output_datm ) ): - os.mkdir( dir_output_datm ) - - print ("dir_input_datm : ", dir_input_datm) # - print ("dir_output_datm : ", dir_output_datm) # - - - #-- Set time stamp + if not os.path.isdir(dir_output): + os.mkdir(dir_output) + + dir_inputdata = "/glade/p/cesmdata/cseg/inputdata/" + dir_clm_forcedata = "/glade/p/cgd/tss/CTSM_datm_forcing_data/" + dir_input_datm = os.path.join( + dir_clm_forcedata, "atm_forcing.datm7.GSWP3.0.5d.v1.c170516/" + ) + dir_output_datm = os.path.join(dir_output, "datmdata/") + if not os.path.isdir(dir_output_datm): + os.mkdir(dir_output_datm) + + print("dir_input_datm : ", dir_input_datm) # + print("dir_output_datm : ", dir_output_datm) # + + # -- Set time stamp today = date.today() timetag = today.strftime("%y%m%d") - #-- Specify land domain file --------------------------------- - fdomain_in = 
os.path.join(dir_inputdata,'share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc') - fdomain_out = dir_output + single_point.add_tag_to_filename( fdomain_in, single_point.tag ) + # -- Specify land domain file --------------------------------- + fdomain_in = os.path.join( + dir_inputdata, "share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc" + ) + fdomain_out = dir_output + single_point.add_tag_to_filename( + fdomain_in, single_point.tag + ) single_point.fdomain_in = fdomain_in single_point.fdomain_out = fdomain_out - print ("fdomain_in :",fdomain_in) # - print ("fdomain_out :",fdomain_out) # + print("fdomain_in :", fdomain_in) # + print("fdomain_out :", fdomain_out) # - #-- Specify surface data file -------------------------------- + # -- Specify surface data file -------------------------------- if crop_flag: - fsurf_in = os.path.join (dir_inputdata, 'lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc') + fsurf_in = os.path.join( + dir_inputdata, + "lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc", + ) else: - fsurf_in = os.path.join (dir_inputdata, 'lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc') + fsurf_in = os.path.join( + dir_inputdata, + "lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc", + ) - #fsurf_out = dir_output + single_point.add_tag_to_filename(fsurf_in, single_point.tag) # remove res from filename for singlept - fsurf_out = dir_output + single_point.create_fileout_name(fsurf_in, single_point.tag) + # fsurf_out = dir_output + single_point.add_tag_to_filename(fsurf_in, single_point.tag) # remove res from filename for singlept + fsurf_out = dir_output + single_point.create_fileout_name( + fsurf_in, single_point.tag + ) single_point.fsurf_in = fsurf_in single_point.fsurf_out = fsurf_out - print ("fsurf_in :",fsurf_in) # - print ("fsurf_out :",fsurf_out) # + print("fsurf_in :", fsurf_in) # + print("fsurf_out :", fsurf_out) # - #-- Specify landuse file ------------------------------------- + # -- Specify landuse file ------------------------------------- if crop_flag: - fluse_in = os.path.join (dir_inputdata,'lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc') + fluse_in = os.path.join( + dir_inputdata, + "lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc", + ) else: - fluse_in = os.path.join (dir_inputdata,'lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc') - #fluse_out = dir_output + single_point.add_tag_to_filename( fluse_in, single_point.tag ) # remove resolution from filename for singlept cases - fluse_out = dir_output + single_point.create_fileout_name(fluse_in, single_point.tag) + fluse_in = os.path.join( + dir_inputdata, + "lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc", + ) + # fluse_out = dir_output + single_point.add_tag_to_filename( fluse_in, single_point.tag ) # remove resolution from filename for singlept cases + fluse_out = dir_output + single_point.create_fileout_name( + fluse_in, single_point.tag + ) single_point.fluse_in = fluse_in single_point.fluse_out = fluse_out - print ("fluse_in :", fluse_in) # - print ("fluse_out :", fluse_out) # - - #-- Specify datm domain file --------------------------------- 
- fdatmdomain_in = os.path.join (dir_clm_forcedata,'atm_forcing.datm7.GSWP3.0.5d.v1.c170516/domain.lnd.360x720_gswp3.0v1.c170606.nc') - fdatmdomain_out = dir_output_datm+single_point.add_tag_to_filename( fdatmdomain_in, single_point.tag ) - single_point.fdatmdomain_in = fdatmdomain_in + print("fluse_in :", fluse_in) # + print("fluse_out :", fluse_out) # + + # -- Specify datm domain file --------------------------------- + fdatmdomain_in = os.path.join( + dir_clm_forcedata, + "atm_forcing.datm7.GSWP3.0.5d.v1.c170516/domain.lnd.360x720_gswp3.0v1.c170606.nc", + ) + fdatmdomain_out = dir_output_datm + single_point.add_tag_to_filename( + fdatmdomain_in, single_point.tag + ) + single_point.fdatmdomain_in = fdatmdomain_in single_point.fdatmdomain_out = fdatmdomain_out - print ("fdatmdomain_in : ", fdatmdomain_in) # - print ("fdatmdomain out : ", fdatmdomain_out) # + print("fdatmdomain_in : ", fdatmdomain_in) # + print("fdatmdomain out : ", fdatmdomain_out) # - #-- Create CTSM domain file + # -- Create CTSM domain file if create_domain: single_point.create_domain_at_point() - #-- Create CTSM surface data file + # -- Create CTSM surface data file if create_surfdata: single_point.create_surfdata_at_point() - #-- Create CTSM transient landuse data file + # -- Create CTSM transient landuse data file if create_landuse: single_point.create_landuse_at_point() - #-- Create single point atmospheric forcing data + # -- Create single point atmospheric forcing data if create_datm: single_point.create_datmdomain_at_point() - single_point.datm_syr =datm_syr - single_point.datm_eyr =datm_eyr + single_point.datm_syr = datm_syr + single_point.datm_eyr = datm_eyr single_point.dir_input_datm = dir_input_datm single_point.dir_output_datm = dir_output_datm single_point.create_datm_at_point() - print( "Successfully ran script for single point." 
) + print("Successfully ran script for single point.") exit() - elif (args.run_type == "reg"): - print ("Running the script for the region") - #-- Specify region to extract - lat1 = args.lat1 + elif args.run_type == "reg": + print("Running the script for the region") + # -- Specify region to extract + lat1 = args.lat1 lat2 = args.lat2 lon1 = args.lon1 lon2 = args.lon2 - #-- Create regional CLM domain file - create_domain = args.create_domain - #-- Create CLM surface data file + # -- Create regional CLM domain file + create_domain = args.create_domain + # -- Create CLM surface data file create_surfdata = args.create_surfdata - #-- Create CLM surface data file - create_landuse = args.create_landuse - #-- Create DATM atmospheric forcing data - create_datm = args.create_datm + # -- Create CLM surface data file + create_landuse = args.create_landuse + # -- Create DATM atmospheric forcing data + create_datm = args.create_datm crop_flag = args.crop_flag reg_name = args.reg_name - region = RegionalCase(lat1, lat2, lon1, lon2, reg_name, create_domain, create_surfdata, create_landuse, create_datm) + region = RegionalCase( + lat1, + lat2, + lon1, + lon2, + reg_name, + create_domain, + create_surfdata, + create_landuse, + create_datm, + ) - print (region) + print(region) if crop_flag: - num_pft = "78" - else: - num_pft = "16" - - - print(' crop_flag = '+ crop_flag.__str__()+ ' num_pft ='+ num_pft) + num_pft = "78" + else: + num_pft = "16" + print(" crop_flag = " + crop_flag.__str__() + " num_pft =" + num_pft) region.create_tag() - #-- Set input and output filenames - #-- Specify input and output directories - dir_output='/glade/scratch/'+myname+'/region/' - if ( not os.path.isdir( dir_output ) ): - os.mkdir( dir_output ) + # -- Set input and output filenames + # -- Specify input and output directories + dir_output = "/glade/scratch/" + myname + "/region/" + if not os.path.isdir(dir_output): + os.mkdir(dir_output) - dir_inputdata='/glade/p/cesmdata/cseg/inputdata/' - dir_clm_forcedata='/glade/p/cgd/tss/CTSM_datm_forcing_data/' + dir_inputdata = "/glade/p/cesmdata/cseg/inputdata/" + dir_clm_forcedata = "/glade/p/cgd/tss/CTSM_datm_forcing_data/" - #-- Set time stamp - command='date "+%y%m%d"' - x2=subprocess.Popen(command,stdout=subprocess.PIPE,shell='True') - x=x2.communicate() + # -- Set time stamp + command = 'date "+%y%m%d"' + x2 = subprocess.Popen(command, stdout=subprocess.PIPE, shell="True") + x = x2.communicate() timetag = x[0].strip() - print (timetag) - - #-- Specify land domain file --------------------------------- - fdomain_in = dir_inputdata+'share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc' - fdomain_out = dir_output + 'domain.lnd.fv1.9x2.5_gx1v7.'+region.tag+'_170518.nc' - #SinglePointCase.set_fdomain (fdomain) + print(timetag) + + # -- Specify land domain file --------------------------------- + fdomain_in = ( + dir_inputdata + "share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc" + ) + fdomain_out = ( + dir_output + "domain.lnd.fv1.9x2.5_gx1v7." 
+ region.tag + "_170518.nc" + ) + # SinglePointCase.set_fdomain (fdomain) region.fdomain_in = fdomain_in region.fdomain_out = fdomain_out - #-- Specify surface data file -------------------------------- - fsurf_in = dir_inputdata+'lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc' - fsurf_out = dir_output + 'surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_'+region.tag+'_c170824.nc' + # -- Specify surface data file -------------------------------- + fsurf_in = ( + dir_inputdata + + "lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc" + ) + fsurf_out = ( + dir_output + + "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" + + region.tag + + "_c170824.nc" + ) region.fsurf_in = fsurf_in region.fsurf_out = fsurf_out - #-- Specify landuse file ------------------------------------- - fluse_in = dir_inputdata+'lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc' - fluse_out = dir_output + 'landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_'+region.tag+'.c170824.nc' + # -- Specify landuse file ------------------------------------- + fluse_in = ( + dir_inputdata + + "lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc" + ) + fluse_out = ( + dir_output + + "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" + + region.tag + + ".c170824.nc" + ) region.fluse_in = fluse_in region.fluse_out = fluse_out - #-- Create CTSM domain file + # -- Create CTSM domain file if create_domain: region.create_domain_at_reg() - #-- Create CTSM surface data file + # -- Create CTSM surface data file if create_surfdata: region.create_surfdata_at_reg() - #-- Create CTSM transient landuse data file + # -- Create CTSM transient landuse data file if create_landuse: region.create_landuse_at_reg() - print( "Successfully ran script for a regional case." ) + print("Successfully ran script for a regional case.") - else : + else: # print help when no option is chosen get_parser().print_help() + if __name__ == "__main__": main() From 93243c6c2607b253a1d73cb653084854f9fafcc4 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 7 Sep 2021 13:47:08 -0600 Subject: [PATCH 012/223] more comments... --- python/ctsm/subset_data.py | 29 ++++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 0f1154e263..4d3fc5ebd6 100755 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -108,9 +108,9 @@ def get_parser(): - """Get parser object for this script.""" - # parser = ArgumentParser(description=__doc__, - # formatter_class=ArgumentDefaultsHelpFormatter) + """ + Get parser object for this script. + """ parser = ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) @@ -432,6 +432,16 @@ def plat_type(x): Function to define lat type for the parser and raise error if latitude is not between -90 and 90. + + Args: + x(str): latitude + + Raises: + Error when x (latitude) is not between -90 and 90. + + Returns: + x (float): latitude in float + """ x = float(x) if (x < -90) or (x > 90): @@ -446,12 +456,21 @@ def plon_type(x): Function to define lon type for the parser and convert negative longitudes and raise error if lon is not between -180 and 360. + + Args: + x (str): longitude + + Raises: + Error: when latitude is <-180 and >360. 
+ + Returns: + x(float): converted longitude between 0 and 360 """ x = float(x) if (-180 < x) and (x < 0): - print("lon is :", lon) + print("lon is :", x) x = x % 360 - print("after modulo lon is :", lon) + print("after modulo lon is :", x) if (x < 0) or (x > 360): raise argparse.ArgumentTypeError( "ERROR: Latitude of single point should be between 0 and 360 or -180 and 180." From 408c6043587ad59440c09a24bba1549930b70069 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 7 Sep 2021 14:54:58 -0600 Subject: [PATCH 013/223] running through formatter... --- python/ctsm/site_and_regional/base_case.py | 124 ++++--- .../ctsm/site_and_regional/regional_case.py | 147 ++++---- .../site_and_regional/single_point_case.py | 340 ++++++++++-------- 3 files changed, 343 insertions(+), 268 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index eb4e48853c..1a17762281 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -1,19 +1,20 @@ import os import numpy as np import xarray as xr -import subprocess +import subprocess from datetime import date from getpass import getuser -myname = getuser() +myname = getuser() -class BaseCase : + +class BaseCase: """ Parent class to SinglePointCase and RegionalCase - + ... - + Attributes ---------- create_domain : bool @@ -24,46 +25,55 @@ class BaseCase : flag for creating landuse file create_datm : bool flag for creating DATM files - + Methods ------- create_1d_coord(filename, lon_varname , lat_varname,x_dim , y_dim ) create 1d coordinate variables to enable sel() method - + add_tag_to_filename(filename, tag) - add a tag and timetag to a filename ending with - [._]cYYMMDD.nc or [._]YYMMDD.nc + add a tag and timetag to a filename ending with + [._]cYYMMDD.nc or [._]YYMMDD.nc """ + def __init__(self, create_domain, create_surfdata, create_landuse, create_datm): self.create_domain = create_domain self.create_surfdata = create_surfdata self.create_landuse = create_landuse self.create_datm = create_datm - + def __str__(self): - return str(self.__class__) + '\n' + '\n'.join((str(item) + ' = ' + str(self.__dict__[item]) - for item in sorted(self.__dict__))) - + return ( + str(self.__class__) + + "\n" + + "\n".join( + ( + str(item) + " = " + str(self.__dict__[item]) + for item in sorted(self.__dict__) + ) + ) + ) + @staticmethod - def create_1d_coord(filename, lon_varname , lat_varname , x_dim , y_dim): + def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): """ lon_varname : variable name that has 2d lon lat_varname : variable name that has 2d lat x_dim: dimension name in X -- lon y_dim: dimension name in Y -- lat """ - print( "Open file: "+filename ) + print("Open file: " + filename) f1 = xr.open_dataset(filename) - + # create 1d coordinate variables to enable sel() method - lon0 = np.asarray(f1[lon_varname][0,:]) - lat0 = np.asarray(f1[lat_varname][:,0]) - lon = xr.DataArray(lon0,name='lon',dims=x_dim,coords={x_dim:lon0}) - lat = xr.DataArray(lat0,name='lat',dims=y_dim,coords={y_dim:lat0}) - - f2=f1.assign({'lon':lon,'lat':lat}) - - f2.reset_coords([lon_varname,lat_varname]) + lon0 = np.asarray(f1[lon_varname][0, :]) + lat0 = np.asarray(f1[lat_varname][:, 0]) + lon = xr.DataArray(lon0, name="lon", dims=x_dim, coords={x_dim: lon0}) + lat = xr.DataArray(lat0, name="lat", dims=y_dim, coords={y_dim: lat0}) + + f2 = f1.assign({"lon": lon, "lat": lat}) + + f2.reset_coords([lon_varname, lat_varname]) f1.close() return f2 @@ -77,51 +87,59 @@ def 
add_tag_to_filename(filename, tag): """ basename = os.path.basename(filename) cend = -10 - if ( basename[cend] == "c" ): - cend = cend - 1 - if ( (basename[cend] != ".") and (basename[cend] != "_") ): - print ( "Trouble figuring out where to add tag to filename:"+filename ) - os.abort() + if basename[cend] == "c": + cend = cend - 1 + if (basename[cend] != ".") and (basename[cend] != "_"): + print("Trouble figuring out where to add tag to filename:" + filename) + os.abort() today = date.today() today_string = today.strftime("%y%m%d") - return( basename[:cend]+"_"+tag+"_c"+today_string +'.nc') - + return basename[:cend] + "_" + tag + "_c" + today_string + ".nc" + def update_metadata(self, nc): - #update attributes + # update attributes today = date.today() today_string = today.strftime("%Y-%m-%d") - - #get git hash + + # get git hash sha = self.get_git_sha() - - nc.attrs['Created_on'] = today_string - nc.attrs['Created_by'] = myname - nc.attrs['Created_with'] = os.path.abspath(__file__) + " -- "+sha - - #delete unrelated attributes if they exist - del_attrs = ['source_code', 'SVN_url', 'hostname', 'history' - 'History_Log', 'Logname', 'Host', 'Version', - 'Compiler_Optimized'] + + nc.attrs["Created_on"] = today_string + nc.attrs["Created_by"] = myname + nc.attrs["Created_with"] = os.path.abspath(__file__) + " -- " + sha + + # delete unrelated attributes if they exist + del_attrs = [ + "source_code", + "SVN_url", + "hostname", + "history" "History_Log", + "Logname", + "Host", + "Version", + "Compiler_Optimized", + ] attr_list = nc.attrs - + for attr in del_attrs: if attr in attr_list: - #print ("This attr should be deleted:", attr) - del(nc.attrs[attr]) - - - #for attr, value in attr_list.items(): - # print (attr + " = "+str(value)) + # print ("This attr should be deleted:", attr) + del nc.attrs[attr] + # for attr, value in attr_list.items(): + # print (attr + " = "+str(value)) @staticmethod - def get_git_sha(): + def get_git_sha(): """ Returns Git short SHA for the currect directory. """ try: - sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode() + sha = ( + subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) + .strip() + .decode() + ) except subprocess.CalledProcessError: sha = "NOT-A-GIT-REPOSITORY" return sha - diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index c35422a54d..4b0d4fbb3c 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -3,98 +3,111 @@ import numpy as np import xarray as xr -class RegionalCase (BaseCase): + +class RegionalCase(BaseCase): """ A case to encapsulate regional cases. 
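    ...

    Attributes
    ----------
    lat1 : float
        first latitude of the region (lower bound)
    lat2 : float
        second latitude of the region (upper bound)
    lon1 : float
        first longitude of the region (lower bound)
    lon2 : float
        second longitude of the region (upper bound)
    reg_name : str -- default = None
        Region name

    Methods
    -------
    create_tag
        create a tag for the region which is the region name or the
        "lon1-lon2_lat1-lat2" format if the region name does not exist.
    create_domain_at_reg
        Create domain file for a region.
    create_surfdata_at_reg
        Create surface dataset for a region.
    create_landuse_at_reg
        Create landuse file for a region.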
""" - - def __init__(self, lat1, lat2, lon1, lon2, reg_name, - create_domain, create_surfdata, create_landuse, create_datm): + + def __init__( + self, + lat1, + lat2, + lon1, + lon2, + reg_name, + create_domain, + create_surfdata, + create_landuse, + create_datm, + ): super().__init__(create_domain, create_surfdata, create_landuse, create_datm) self.lat1 = lat1 self.lat2 = lat2 self.lon1 = lon1 self.lon2 = lon2 self.reg_name = reg_name - + def create_tag(self): if self.reg_name: self.tag = self.reg_name else: - self.tag=str(self.lon1)+'-'+str(self.lon2)+'_'+str(self.lat1)+'-'+str(self.lat2) - - def create_domain_at_reg (self): - #print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating domain file at region:", self.tag) + self.tag = ( + str(self.lon1) + + "-" + + str(self.lon2) + + "_" + + str(self.lat1) + + "-" + + str(self.lat2) + ) + + def create_domain_at_reg(self): + # print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print("Creating domain file at region:", self.tag) # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') - lat = f2['lat'] - lon = f2['lon'] + f2 = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") + lat = f2["lat"] + lon = f2["lon"] # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(nj=yind,ni=xind) - - #update attributes + xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3 = f2.isel(nj=yind, ni=xind) + + # update attributes self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdomain_in - - wfile=self.fdomain_out + f3.attrs["Created_from"] = self.fdomain_in + + wfile = self.fdomain_out # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() + f3.to_netcdf(path=wfile, mode="w") + print("Successfully created file (fdomain_out)" + self.fdomain_out) + f2.close() + f3.close() - - def create_surfdata_at_reg(self): - #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating surface dataset file at region:", self.tag) + # print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print("Creating surface dataset file at region:", self.tag) # create 1d coordinate variables to enable sel() method filename = self.fsurf_in - f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') - lat = f2['lat'] - lon = f2['lon'] + f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + lat = f2["lat"] + lon = f2["lon"] # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(lsmlat=yind,lsmlon=xind) - - #update attributes + xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3 = f2.isel(lsmlat=yind, lsmlon=xind) + + # update attributes self.update_metadata(f3) - f3.attrs['Created_from'] = self.fsurf_in - + f3.attrs["Created_from"] = self.fsurf_in + # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode='w') - print('created file (fsurf_out)'+self.fsurf_out) - #f1.close(); - f2.close(); 
f3.close() - - - def create_landuse_at_reg (self): - #print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print ("Creating surface dataset file at region:",self.tag) + f3.to_netcdf(path=self.fsurf_out, mode="w") + print("created file (fsurf_out)" + self.fsurf_out) + # f1.close(); + f2.close() + f3.close() + + def create_landuse_at_reg(self): + # print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) + print("Creating surface dataset file at region:", self.tag) # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') - lat = f2['lat'] - lon = f2['lon'] + f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + lat = f2["lat"] + lon = f2["lon"] # subset longitude and latitude arrays - xind=np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind=np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3=f2.isel(lsmlat=yind,lsmlon=xind) - - #update attributes - self.update_metadata(f3) - f3.attrs['Created_from'] = self.fluse_in - - wfile=self.fluse_out - # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - - - - + xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f3 = f2.isel(lsmlat=yind, lsmlon=xind) + # update attributes + self.update_metadata(f3) + f3.attrs["Created_from"] = self.fluse_in + wfile = self.fluse_out + # mode 'w' overwrites file + f3.to_netcdf(path=wfile, mode="w") + print("Successfully created file (fdomain_out)" + self.fdomain_out) + f2.close() + f3.close() diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 1fd5fbdcf7..2f4949572a 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -4,12 +4,13 @@ import xarray as xr from datetime import date -class SinglePointCase (BaseCase): + +class SinglePointCase(BaseCase): """ A case to encapsulate single point cases. - + ... - + Attributes ---------- plat : float @@ -18,13 +19,13 @@ class SinglePointCase (BaseCase): longitude site_name: str -- default = None Site name - + Methods ------- create_tag create a tag for single point which is the site name or the "lon-lat" format if the site name does not exist. - + create_domain_at_point Create domain file at a single point. create_landuse_at_point: @@ -34,11 +35,22 @@ class SinglePointCase (BaseCase): create_datmdomain_at_point: Create DATM domain file at a single point. 
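    create_fileout_name:
        Create an output file name from the input file name and a tag.
    extract_datm_at:
        Extract DATM forcing data at a single point from one input file.
    create_datm_at_point:
        Create all DATM forcing data (Solar, Precip, TPQWL) at a single point.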
""" - - def __init__(self, plat, plon,site_name, - create_domain, create_surfdata, create_landuse, create_datm, - overwrite_single_pft, dominant_pft, zero_nonveg_landunits, - uniform_snowpack, no_saturation_excess): + + def __init__( + self, + plat, + plon, + site_name, + create_domain, + create_surfdata, + create_landuse, + create_datm, + overwrite_single_pft, + dominant_pft, + zero_nonveg_landunits, + uniform_snowpack, + no_saturation_excess, + ): super().__init__(create_domain, create_surfdata, create_landuse, create_datm) self.plat = plat self.plon = plon @@ -53,192 +65,224 @@ def create_tag(self): if self.site_name: self.tag = self.site_name else: - self.tag=str(self.plon)+'_'+str(self.plat) - + self.tag = str(self.plon) + "_" + str(self.plat) + @staticmethod - def create_fileout_name( filename,tag): - + def create_fileout_name(filename, tag): + basename = os.path.basename(filename) - items = basename.split('_') + items = basename.split("_") today = date.today() today_string = today.strftime("%y%m%d") - new_string = items[0]+"_"+items[2]+"_"+items[3]+"_"+ items[4] \ - +"_"+items[5]+"_"+items[6]+"_"+tag+"_c"+today_string+".nc" + new_string = ( + items[0] + + "_" + + items[2] + + "_" + + items[3] + + "_" + + items[4] + + "_" + + items[5] + + "_" + + items[6] + + "_" + + tag + + "_c" + + today_string + + ".nc" + ) return new_string - - def create_domain_at_point (self): - print( "----------------------------------------------------------------------") - print ("Creating domain file at ", self.plon, self.plat) + + def create_domain_at_point(self): + print("----------------------------------------------------------------------") + print("Creating domain file at ", self.plon, self.plat) # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, 'xc','yc','ni','nj') + f2 = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") # extract gridcell closest to plon/plat - f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') + f3 = f2.sel(ni=self.plon, nj=self.plat, method="nearest") # expand dimensions - f3 = f3.expand_dims(['nj','ni']) - - #update attributes + f3 = f3.expand_dims(["nj", "ni"]) + + # update attributes self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdomain_in - - wfile=self.fdomain_out - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdomain_out)'+self.fdomain_out) - f2.close(); f3.close() - - - - def create_landuse_at_point (self): - print( "----------------------------------------------------------------------") - print ("Creating landuse file at ", self.plon, self.plat, ".") + f3.attrs["Created_from"] = self.fdomain_in + + wfile = self.fdomain_out + f3.to_netcdf(path=wfile, mode="w") + print("Successfully created file (fdomain_out)" + self.fdomain_out) + f2.close() + f3.close() + + def create_landuse_at_point(self): + print("----------------------------------------------------------------------") + print("Creating landuse file at ", self.plon, self.plat, ".") # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, 'LONGXY','LATIXY','lsmlon','lsmlat') + f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat - f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') - + f3 = f2.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") + # expand dimensions - f3 = f3.expand_dims(['lsmlat','lsmlon']) - # specify dimension order - #f3 = f3.transpose('time','lat','lon') - f3 = 
f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') - #f3['YEAR'] = f3['YEAR'].squeeze() - + f3 = f3.expand_dims(["lsmlat", "lsmlon"]) + # specify dimension order + # f3 = f3.transpose('time','lat','lon') + f3 = f3.transpose(u"time", u"cft", u"natpft", u"lsmlat", u"lsmlon") + # f3['YEAR'] = f3['YEAR'].squeeze() + # revert expand dimensions of YEAR - year = np.squeeze(np.asarray(f3['YEAR'])) - x = xr.DataArray(year, coords={'time':f3['time']}, dims='time', name='YEAR') - x.attrs['units']='unitless' - x.attrs['long_name']='Year of PFT data' - f3['YEAR'] = x - - #update attributes + year = np.squeeze(np.asarray(f3["YEAR"])) + x = xr.DataArray(year, coords={"time": f3["time"]}, dims="time", name="YEAR") + x.attrs["units"] = "unitless" + x.attrs["long_name"] = "Year of PFT data" + f3["YEAR"] = x + + # update attributes self.update_metadata(f3) - f3.attrs['Created_from'] = self.fluse_in - + f3.attrs["Created_from"] = self.fluse_in + wfile = self.fluse_out # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (luse_out)'+self.fluse_out,".") - f2.close(); f3.close() - + f3.to_netcdf(path=wfile, mode="w") + print("Successfully created file (luse_out)" + self.fluse_out, ".") + f2.close() + f3.close() def create_surfdata_at_point(self): - print( "----------------------------------------------------------------------") - print ("Creating surface dataset file at ", self.plon, self.plat, ".") + print("----------------------------------------------------------------------") + print("Creating surface dataset file at ", self.plon, self.plat, ".") # create 1d coordinate variables to enable sel() method filename = self.fsurf_in - f2 = self.create_1d_coord(filename, 'LONGXY','LATIXY','lsmlon','lsmlat') + f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat - f3 = f2.sel(lsmlon=self.plon,lsmlat=self.plat,method='nearest') + f3 = f2.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") # expand dimensions - f3 = f3.expand_dims(['lsmlat','lsmlon']).copy(deep=True) - + f3 = f3.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) + # modify surface data properties if self.overwrite_single_pft: - f3['PCT_NAT_PFT'][:,:,:] = 0 - f3['PCT_NAT_PFT'][:,:,self.dominant_pft] = 100 + f3["PCT_NAT_PFT"][:, :, :] = 0 + f3["PCT_NAT_PFT"][:, :, self.dominant_pft] = 100 if self.zero_nonveg_landunits: - f3['PCT_NATVEG'][:,:] = 100 - f3['PCT_CROP'][:,:] = 0 - f3['PCT_LAKE'][:,:] = 0. - f3['PCT_WETLAND'][:,:] = 0. - f3['PCT_URBAN'][:,:,] = 0. - f3['PCT_GLACIER'][:,:] = 0. + f3["PCT_NATVEG"][:, :] = 100 + f3["PCT_CROP"][:, :] = 0 + f3["PCT_LAKE"][:, :] = 0.0 + f3["PCT_WETLAND"][:, :] = 0.0 + f3["PCT_URBAN"][ + :, + :, + ] = 0.0 + f3["PCT_GLACIER"][:, :] = 0.0 if self.uniform_snowpack: - f3['STD_ELEV'][:,:] = 20. + f3["STD_ELEV"][:, :] = 20.0 if self.no_saturation_excess: - f3['FMAX'][:,:] = 0. 
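For orientation, a minimal self-contained sketch of what the overwrite_single_pft branch above does to PCT_NAT_PFT; the toy dimension sizes, the dimension ordering, and the dominant_pft value here are assumptions for illustration, not taken from a real surface dataset:

    import numpy as np
    import xarray as xr

    # toy surface dataset: one gridcell with 15 natural PFTs (sizes assumed)
    pct = np.full((1, 1, 15), 100.0 / 15)
    f3 = xr.Dataset({"PCT_NAT_PFT": (("lsmlat", "lsmlon", "natpft"), pct)})
    dominant_pft = 7  # hypothetical choice

    # zero out every PFT fraction, then give 100% to the dominant PFT
    f3["PCT_NAT_PFT"][:, :, :] = 0
    f3["PCT_NAT_PFT"][:, :, dominant_pft] = 100
    assert float(f3["PCT_NAT_PFT"].sum()) == 100.0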
- - # specify dimension order - #f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') - f3 = f3.transpose(u'time', u'cft', u'lsmpft', u'natpft', u'nglcec', u'nglcecp1', u'nlevsoi', u'nlevurb', u'numrad', u'numurbl', 'lsmlat', 'lsmlon') - - #update attributes + f3["FMAX"][:, :] = 0.0 + + # specify dimension order + # f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') + f3 = f3.transpose( + u"time", + u"cft", + u"lsmpft", + u"natpft", + u"nglcec", + u"nglcecp1", + u"nlevsoi", + u"nlevurb", + u"numrad", + u"numurbl", + "lsmlat", + "lsmlon", + ) + + # update attributes self.update_metadata(f3) - f3.attrs['Created_from'] = self.fsurf_in - del(f3.attrs['History_Log']) + f3.attrs["Created_from"] = self.fsurf_in + del f3.attrs["History_Log"] # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode='w') - print('Successfully created file (fsurf_out) :'+self.fsurf_out) - f2.close(); f3.close() + f3.to_netcdf(path=self.fsurf_out, mode="w") + print("Successfully created file (fsurf_out) :" + self.fsurf_out) + f2.close() + f3.close() - def create_datmdomain_at_point(self): - print( "----------------------------------------------------------------------") - print("Creating DATM domain file at ", self.plon, self.plat, ".") + def create_datmdomain_at_point(self): + print("----------------------------------------------------------------------") + print("Creating DATM domain file at ", self.plon, self.plat, ".") # create 1d coordinate variables to enable sel() method filename = self.fdatmdomain_in - f2 = self.create_1d_coord(filename, 'xc','yc','ni','nj') + f2 = self.create_1d_coord(filename, "xc", "yc", "ni", "nj") # extract gridcell closest to plon/plat - f3 = f2.sel(ni=self.plon,nj=self.plat,method='nearest') + f3 = f2.sel(ni=self.plon, nj=self.plat, method="nearest") # expand dimensions - f3 = f3.expand_dims(['nj','ni']) - wfile=self.fdatmdomain_out - #update attributes + f3 = f3.expand_dims(["nj", "ni"]) + wfile = self.fdatmdomain_out + # update attributes self.update_metadata(f3) - f3.attrs['Created_from'] = self.fdatmdomain_in + f3.attrs["Created_from"] = self.fdatmdomain_in # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode='w') - print('Successfully created file (fdatmdomain_out) :'+self.fdatmdomain_out) - f2.close(); f3.close() + f3.to_netcdf(path=wfile, mode="w") + print("Successfully created file (fdatmdomain_out) :" + self.fdatmdomain_out) + f2.close() + f3.close() def extract_datm_at(self, file_in, file_out): # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(file_in, 'LONGXY','LATIXY','lon','lat') + f2 = self.create_1d_coord(file_in, "LONGXY", "LATIXY", "lon", "lat") # extract gridcell closest to plon/plat - f3 = f2.sel(lon=self.plon,lat=self.plat,method='nearest') + f3 = f2.sel(lon=self.plon, lat=self.plat, method="nearest") # expand dimensions - f3 = f3.expand_dims(['lat','lon']) - # specify dimension order - f3 = f3.transpose(u'scalar','time','lat','lon') - - #update attributes + f3 = f3.expand_dims(["lat", "lon"]) + # specify dimension order + f3 = f3.transpose(u"scalar", "time", "lat", "lon") + + # update attributes self.update_metadata(f3) - f3.attrs['Created_from'] = file_in + f3.attrs["Created_from"] = file_in # mode 'w' overwrites file - f3.to_netcdf(path=file_out, mode='w') - print('Successfully created file :'+ file_out) - f2.close(); f3.close() - + f3.to_netcdf(path=file_out, mode="w") + print("Successfully created file :" + file_out) + f2.close() + f3.close() + def create_datm_at_point(self): - 
print( "----------------------------------------------------------------------") + print("----------------------------------------------------------------------") print("Creating DATM files at ", self.plon, self.plat, ".") - #-- specify subdirectory names and filename prefixes - solrdir = 'Solar/' - precdir = 'Precip/' - tpqwldir = 'TPHWL/' - prectag = 'clmforc.GSWP3.c2011.0.5x0.5.Prec.' - solrtag = 'clmforc.GSWP3.c2011.0.5x0.5.Solr.' - tpqwtag = 'clmforc.GSWP3.c2011.0.5x0.5.TPQWL.' - - #-- create data files - infile=[] - outfile=[] - for y in range(self.datm_syr,self.datm_eyr+1): - ystr=str(y) - for m in range(1,13): - mstr=str(m) - if m < 10: - mstr='0'+mstr - - dtag=ystr+'-'+mstr - - fsolar=self.dir_input_datm+solrdir+solrtag+dtag+'.nc' - fsolar2=self.dir_output_datm+solrtag+self.tag+'.'+dtag+'.nc' - fprecip=self.dir_input_datm+precdir+prectag+dtag+'.nc' - fprecip2=self.dir_output_datm+prectag+self.tag+'.'+dtag+'.nc' - ftpqw=self.dir_input_datm+tpqwldir+tpqwtag+dtag+'.nc' - ftpqw2=self.dir_output_datm+tpqwtag+self.tag+'.'+dtag+'.nc' - - infile+=[fsolar,fprecip,ftpqw] - outfile+=[fsolar2,fprecip2,ftpqw2] - - nm=len(infile) + # -- specify subdirectory names and filename prefixes + solrdir = "Solar/" + precdir = "Precip/" + tpqwldir = "TPHWL/" + prectag = "clmforc.GSWP3.c2011.0.5x0.5.Prec." + solrtag = "clmforc.GSWP3.c2011.0.5x0.5.Solr." + tpqwtag = "clmforc.GSWP3.c2011.0.5x0.5.TPQWL." + + # -- create data files + infile = [] + outfile = [] + for y in range(self.datm_syr, self.datm_eyr + 1): + ystr = str(y) + for m in range(1, 13): + mstr = str(m) + if m < 10: + mstr = "0" + mstr + + dtag = ystr + "-" + mstr + + fsolar = self.dir_input_datm + solrdir + solrtag + dtag + ".nc" + fsolar2 = self.dir_output_datm + solrtag + self.tag + "." + dtag + ".nc" + fprecip = self.dir_input_datm + precdir + prectag + dtag + ".nc" + fprecip2 = ( + self.dir_output_datm + prectag + self.tag + "." + dtag + ".nc" + ) + ftpqw = self.dir_input_datm + tpqwldir + tpqwtag + dtag + ".nc" + ftpqw2 = self.dir_output_datm + tpqwtag + self.tag + "." + dtag + ".nc" + + infile += [fsolar, fprecip, ftpqw] + outfile += [fsolar2, fprecip2, ftpqw2] + + nm = len(infile) for n in range(nm): print(outfile[n]) file_in = infile[n] file_out = outfile[n] self.extract_datm_at(file_in, file_out) - - - print('All DATM files are created in: '+self.dir_output_datm) - - + print("All DATM files are created in: " + self.dir_output_datm) From 7f72af94b4a573050eaa0f58f5515584d355c694 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 7 Sep 2021 15:23:57 -0600 Subject: [PATCH 014/223] adding the deleted unit test. --- python/ctsm/test/test_unit_subset_data.py | 33 +++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 python/ctsm/test/test_unit_subset_data.py diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py new file mode 100644 index 0000000000..06aa79740d --- /dev/null +++ b/python/ctsm/test/test_unit_subset_data.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python3 + +import unittest +import argparse + +from ctsm.subset_data import plon_type +from ctsm import unit_testing + +# Allow names that pylint doesn't like, because otherwise I find it hard +# to make readable unit test names +# pylint: disable=invalid-name + +class TestSubsetData(unittest.TestCase): + + def test_plonType_positive(self): + result = plon_type(30) + self.assertEqual(result, 30.) + + def test_plonType_negative(self): + result = plon_type(-30) + self.assertEqual(result, 330.) 
+ + def test_plonType_outOfBounds(self): + with self.assertRaisesRegex(argparse.ArgumentTypeError, + "Latitude.*should be between"): + _ = plon_type(361) + +"""Unit tests for subset_data +""" +if __name__ == '__main__': + unit_testing.setup_for_tests() + unittest.main() + From 40d6b3e626e2c32269d32a8e19f71f4704ac1218 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Fri, 17 Sep 2021 13:55:57 -0600 Subject: [PATCH 015/223] removing the capability to run ./subset_data.py directly... --- python/ctsm/subset_data.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 4d3fc5ebd6..85b420b229 100755 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -92,13 +92,6 @@ from logging.handlers import RotatingFileHandler from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter -# -- add python/ctsm to path -_CTSM_PYTHON = os.path.join( - os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python" -) - -sys.path.insert(1, _CTSM_PYTHON) - from ctsm.site_and_regional.base_case import BaseCase from ctsm.site_and_regional.single_point_case import SinglePointCase @@ -483,6 +476,8 @@ def get_git_sha(): Returns Git short SHA for the currect directory. """ try: + + #os.abspath(__file__) sha = ( subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) .strip() @@ -887,7 +882,3 @@ def main(): else: # print help when no option is chosen get_parser().print_help() - - -if __name__ == "__main__": - main() From 7095f7e876d34a2a6b8f05998229b5da3da7016f Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Fri, 17 Sep 2021 13:58:24 -0600 Subject: [PATCH 016/223] not executable anymore... --- python/ctsm/subset_data.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 python/ctsm/subset_data.py diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py old mode 100755 new mode 100644 From aca92c20f892c183e5dff75524671da2387b7109 Mon Sep 17 00:00:00 2001 From: Francis Vitt Date: Wed, 20 Oct 2021 16:29:48 -0600 Subject: [PATCH 017/223] fire emissions fix --- src/biogeochem/CNFireEmissionsMod.F90 | 88 +++++++++++++++------------ 1 file changed, 49 insertions(+), 39 deletions(-) diff --git a/src/biogeochem/CNFireEmissionsMod.F90 b/src/biogeochem/CNFireEmissionsMod.F90 index d3344baaaa..645f074a7d 100644 --- a/src/biogeochem/CNFireEmissionsMod.F90 +++ b/src/biogeochem/CNFireEmissionsMod.F90 @@ -3,18 +3,18 @@ module CNFireEmissionsMod !----------------------------------------------------------------------- ! !DESCRIPTION: ! Gathers carbon emissions from fire sources to be sent to CAM-Chem via - ! the coupler .... + ! the coupler .... ! Created by F. Vitt, and revised by F. Li ! !USES: use shr_kind_mod, only : r8 => shr_kind_r8 use abortutils, only : endrun - use PatchType, only : patch + use PatchType, only : patch use decompMod, only : bounds_type use shr_fire_emis_mod, only : shr_fire_emis_comps_n, shr_fire_emis_comp_t, shr_fire_emis_linkedlist use shr_fire_emis_mod, only : shr_fire_emis_mechcomps_n, shr_fire_emis_mechcomps ! implicit none - private + private ! ! !PUBLIC MEMBER FUNCTIONS: public :: CNFireEmisUpdate @@ -71,7 +71,7 @@ subroutine Init(this, bounds) emis_cmp => emis_cmp%next_emiscomp enddo - call this%InitAllocate(bounds) + call this%InitAllocate(bounds) call this%InitHistory(bounds) end subroutine Init @@ -85,7 +85,7 @@ subroutine InitAllocate(this, bounds) ! 
!ARGUMENTS: class(fireemis_type) :: this - type(bounds_type), intent(in) :: bounds + type(bounds_type), intent(in) :: bounds ! ! !LOCAL VARIABLES: integer :: beg, end, i @@ -128,12 +128,13 @@ subroutine InitHistory(this, bounds) ! !ARGUMENTS: class(fireemis_type) :: this - type(bounds_type), intent(in) :: bounds + type(bounds_type), intent(in) :: bounds ! !LOCAL VARIABLES integer :: begp, endp integer :: imech, icomp type(shr_fire_emis_comp_t), pointer :: emis_cmp + character(len=16) :: units if (shr_fire_emis_mechcomps_n>0) then @@ -143,8 +144,13 @@ subroutine InitHistory(this, bounds) emis_cmp_loop: do while(associated(emis_cmp)) icomp = emis_cmp%index + if (emis_cmp%name(1:4) == 'num_') then + units = 'molecules/m2/sec' + else + units = 'kg/m2/sec' + endif - call hist_addfld1d (fname='FireComp_'//trim(emis_cmp%name), units='kg/m2/sec', & + call hist_addfld1d (fname='FireComp_'//trim(emis_cmp%name), units=units, & avgflag='A', long_name='fire emissions flux of '//trim(emis_cmp%name), & ptr_patch=this%comp(icomp)%emis, default='inactive') @@ -154,8 +160,13 @@ subroutine InitHistory(this, bounds) ! loop over atm chem mechanism species do imech = 1,shr_fire_emis_mechcomps_n + if (shr_fire_emis_mechcomps(imech)%name(1:4) == 'num_') then + units = 'molecules/m2/sec' + else + units = 'kg/m2/sec' + endif - call hist_addfld1d (fname='FireMech_'//trim(shr_fire_emis_mechcomps(imech)%name), units='kg/m2/sec', & + call hist_addfld1d (fname='FireMech_'//trim(shr_fire_emis_mechcomps(imech)%name), units=units, & avgflag='A', long_name='fire emissions flux of '//trim(shr_fire_emis_mechcomps(imech)%name), & ptr_patch=this%mech(imech)%emis, default='inactive') @@ -170,29 +181,29 @@ subroutine InitHistory(this, bounds) ptr_patch=this%ztop_patch, default='inactive') endif - + end subroutine InitHistory !----------------------------------------------------------------------- subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnveg_cs_inst, fireemis_inst ) use CNVegcarbonfluxType, only : cnveg_carbonflux_type - use CNVegCarbonStateType, only : cnveg_carbonstate_type + use CNVegCarbonStateType, only : cnveg_carbonstate_type use clm_varpar, only : ndecomp_pools, nlevdecomp use clm_varcon, only : dzsoi_decomp !ARGUMENTS: - type(bounds_type), intent(in) :: bounds + type(bounds_type), intent(in) :: bounds integer, intent(in) :: num_soilp ! number of soil pfts in filter integer, intent(in) :: filter_soilp(:) ! filter for soil pfts type(cnveg_carbonflux_type), intent(in) :: cnveg_cf_inst - type(cnveg_carbonstate_type),intent(in) :: cnveg_cs_inst + type(cnveg_carbonstate_type),intent(in) :: cnveg_cs_inst type(fireemis_type), intent(inout) :: fireemis_inst !LOCAL VARIABLES: real(r8) :: fire_flux - real(r8) :: fire_flux_lf - real(r8) :: fire_flux_lf1 + real(r8) :: fire_flux_lf + real(r8) :: fire_flux_lf1 type(shr_fire_emis_comp_t), pointer :: emis_cmp real(r8) :: emis_flux(shr_fire_emis_comps_n) integer :: fp,p,g,c ! indices @@ -201,7 +212,7 @@ subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnve if ( shr_fire_emis_mechcomps_n < 1) return - associate( & + associate( & fire_emis => fireemis_inst%fireflx_patch, & totfire => fireemis_inst%totfire, & mech => fireemis_inst%mech, & @@ -236,7 +247,7 @@ subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnve ! 
calculate fire emissions for non-bare ground PFTs if (patch%itype(p) > 0)then if(cnveg_cs_inst%totvegc_col(c) > 0._r8)then - fire_flux_lf1=0._r8 + fire_flux_lf1=0._r8 do l = 1, ndecomp_pools do j = 1, nlevdecomp fire_flux_lf1 = fire_flux_lf1 + & @@ -245,7 +256,7 @@ subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnve end do fire_flux_lf = fire_flux_lf1*cnveg_cs_inst%totvegc_patch(p)/cnveg_cs_inst%totvegc_col(c) else - fire_flux_lf=0._r8 + fire_flux_lf=0._r8 end if fire_flux = fire_flux_lf & + cnveg_cf_inst%m_leafc_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from leafc @@ -261,13 +272,13 @@ subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnve + cnveg_cf_inst%m_frootc_storage_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from frootc_storage + cnveg_cf_inst%m_frootc_xfer_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from frootc_xfer + cnveg_cf_inst%m_livecrootc_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from livecrootc - + cnveg_cf_inst%m_livecrootc_storage_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from livecrootc_storage + + cnveg_cf_inst%m_livecrootc_storage_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from livecrootc_storage + cnveg_cf_inst%m_livecrootc_xfer_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from livecrootc_xfer + cnveg_cf_inst%m_deadcrootc_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from deadcrootc + cnveg_cf_inst%m_deadcrootc_storage_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from deadcrootc_storage + cnveg_cf_inst%m_deadcrootc_xfer_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from deadcrootc_xfer + cnveg_cf_inst%m_gresp_storage_to_fire_patch (p) & ! (gC/m2/s) fire C emissions from gresp_storage - + cnveg_cf_inst%m_gresp_xfer_to_fire_patch (p) ! (gC/m2/s) fire C emissions from gresp_xfer + + cnveg_cf_inst%m_gresp_xfer_to_fire_patch (p) ! (gC/m2/s) fire C emissions from gresp_xfer ! for diagnostics totfire%emis(p) = fire_flux ! gC/m2/sec @@ -279,27 +290,27 @@ subroutine CNFireEmisUpdate(bounds, num_soilp, filter_soilp, cnveg_cf_inst, cnve epsilon = emis_cmp%emis_factors(patch%itype(p)) comp(icomp)%emis(p) = epsilon * fire_flux* 1.e-3_r8/0.5_r8 ! (to convert gC/m2/sec to kg species/m2/sec) - emis_flux(icomp) = emis_cmp%coeff*comp(icomp)%emis(p) + emis_flux(icomp) = comp(icomp)%emis(p) emis_cmp => emis_cmp%next_emiscomp enddo emis_cmp_loop - ! sum up the emissions compontent fluxes for the fluxes of chem mechanism compounds + ! sum up the emissions compontent fluxes for the fluxes of chem mechanism compounds do imech = 1,shr_fire_emis_mechcomps_n n_emis_comps = shr_fire_emis_mechcomps(imech)%n_emis_comps do icomp = 1,n_emis_comps ! loop over number of emission components that make up the nth mechanism compoud ii = shr_fire_emis_mechcomps(imech)%emis_comps(icomp)%ptr%index - fire_emis(p,imech) = fire_emis(p,imech) + emis_flux(ii) - mech(imech)%emis(p) = fire_emis(p,imech) + fire_emis(p,imech) = fire_emis(p,imech) + shr_fire_emis_mechcomps(imech)%coeffs(icomp)*emis_flux(ii) enddo + mech(imech)%emis(p) = fire_emis(p,imech) enddo ztop(p) = vert_dist_top( patch%itype(p) ) end if ! ivt(1:15 only) - enddo ! fp + enddo ! fp end associate end subroutine CNFireEmisUpdate @@ -307,23 +318,23 @@ end subroutine CNFireEmisUpdate ! Private methods !----------------------------------------------------------------------- !ztop compiled from Val Martin et al ACP 2010, Tosca et al. JGR 2011 and Jian et al., ACP 2013 -!st ztop updated based on Val Martin pers. 
communication Jan2015 +!st ztop updated based on Val Martin pers. communication Jan2015 !----------------------------------------------------------------------- -! not_vegetated 500 m +! not_vegetated 500 m !PFT1: needleleaf_evergreen_temperate_tree 4000 m !2: needleleaf_evergreen_boreal_tree 4000 m -!3: needleleaf_deciduous_boreal_tree 3000 m -!4: broadleaf_evergreen_tropical_tree 2500 m -!5: broadleaf_evergreen_temperate_tree 3000 m -!6: broadleaf_deciduous_tropical_tree 2500 m -!7: broadleaf_deciduous_temperate_tree 3000 m -!8: broadleaf_deciduous_boreal_tree 3000 m -!9: broadleaf_evergreen_shrub 2000 m -!10: broadleaf_deciduous_temperate_shrub 2000 m -!11: broadleaf_deciduous_boreal_shrub 2000 m -!12: c3_arctic_grass 1000 m -!13: c3_non-arctic_grass 1000 m -!14: c4_grass 1000 m +!3: needleleaf_deciduous_boreal_tree 3000 m +!4: broadleaf_evergreen_tropical_tree 2500 m +!5: broadleaf_evergreen_temperate_tree 3000 m +!6: broadleaf_deciduous_tropical_tree 2500 m +!7: broadleaf_deciduous_temperate_tree 3000 m +!8: broadleaf_deciduous_boreal_tree 3000 m +!9: broadleaf_evergreen_shrub 2000 m +!10: broadleaf_deciduous_temperate_shrub 2000 m +!11: broadleaf_deciduous_boreal_shrub 2000 m +!12: c3_arctic_grass 1000 m +!13: c3_non-arctic_grass 1000 m +!14: c4_grass 1000 m !15: c3_crop 1000 m !(and all new crops: 1000m) @@ -374,4 +385,3 @@ function vert_dist_top( veg_type ) result(ztop) end function vert_dist_top end module CNFireEmissionsMod - From 158ef55735e1eaf7165528922cedb186a3f0b002 Mon Sep 17 00:00:00 2001 From: Keith Oleson Date: Wed, 27 Oct 2021 11:29:11 -0600 Subject: [PATCH 018/223] Add ventilation flux to canyon floor as wasteheat --- src/biogeophys/BalanceCheckMod.F90 | 4 ++- src/biogeophys/EnergyFluxType.F90 | 34 +++++++++++++++++--- src/biogeophys/SoilFluxesMod.F90 | 4 ++- src/biogeophys/SoilTemperatureMod.F90 | 15 +++++++-- src/biogeophys/UrbBuildTempOleson2015Mod.F90 | 12 +++++-- 5 files changed, 58 insertions(+), 11 deletions(-) diff --git a/src/biogeophys/BalanceCheckMod.F90 b/src/biogeophys/BalanceCheckMod.F90 index 508e118b66..f33e1b8fd0 100644 --- a/src/biogeophys/BalanceCheckMod.F90 +++ b/src/biogeophys/BalanceCheckMod.F90 @@ -560,6 +560,7 @@ subroutine BalanceCheck( bounds, & eflx_lh_tot => energyflux_inst%eflx_lh_tot_patch , & ! Input: [real(r8) (:) ] total latent heat flux (W/m**2) [+ to atm] eflx_soil_grnd => energyflux_inst%eflx_soil_grnd_patch , & ! Input: [real(r8) (:) ] soil heat flux (W/m**2) [+ = into soil] eflx_wasteheat_patch => energyflux_inst%eflx_wasteheat_patch , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2) + eflx_vent_wasteheat_patch => energyflux_inst%eflx_vent_wasteheat_patch, & ! Input: [real(r8) (:) ] sensible heat flux from ventilation source of urban waste heat (W/m**2) eflx_heat_from_ac_patch => energyflux_inst%eflx_heat_from_ac_patch , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2) eflx_traffic_patch => energyflux_inst%eflx_traffic_patch , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2) eflx_dynbal => energyflux_inst%eflx_dynbal_grc , & ! 
Input: [real(r8) (:) ] energy conversion flux due to dynamic land cover change(W/m**2) [+ to atm] @@ -910,7 +911,8 @@ subroutine BalanceCheck( bounds, & errseb(p) = sabv(p) + sabg(p) & - eflx_lwrad_net(p) & - eflx_sh_tot(p) - eflx_lh_tot(p) - eflx_soil_grnd(p) & - + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) + + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) & + + eflx_vent_wasteheat_patch(p) end if !TODO MV - move this calculation to a better place - does not belong in BalanceCheck netrad(p) = fsa(p) - eflx_lwrad_net(p) diff --git a/src/biogeophys/EnergyFluxType.F90 b/src/biogeophys/EnergyFluxType.F90 index 5634d26e50..394b4decb5 100644 --- a/src/biogeophys/EnergyFluxType.F90 +++ b/src/biogeophys/EnergyFluxType.F90 @@ -8,7 +8,7 @@ module EnergyFluxType use shr_kind_mod , only : r8 => shr_kind_r8 use shr_log_mod , only : errMsg => shr_log_errMsg use clm_varcon , only : spval - use clm_varctl , only : use_biomass_heat_storage + use clm_varctl , only : use_biomass_heat_storage, iulog use decompMod , only : bounds_type use LandunitType , only : lun use ColumnType , only : col @@ -63,9 +63,11 @@ module EnergyFluxType real(r8), pointer :: eflx_anthro_patch (:) ! patch total anthropogenic heat flux (W/m**2) real(r8), pointer :: eflx_traffic_patch (:) ! patch traffic sensible heat flux (W/m**2) real(r8), pointer :: eflx_wasteheat_patch (:) ! patch sensible heat flux from domestic heating/cooling sources of waste heat (W/m**2) + real(r8), pointer :: eflx_vent_wasteheat_patch (:) ! patch sensible heat flux from ventilation source of urban waste heat (W/m**2) real(r8), pointer :: eflx_heat_from_ac_patch (:) ! patch sensible heat flux put back into canyon due to removal by AC (W/m**2) real(r8), pointer :: eflx_traffic_lun (:) ! lun traffic sensible heat flux (W/m**2) real(r8), pointer :: eflx_wasteheat_lun (:) ! lun sensible heat flux from domestic heating/cooling sources of waste heat (W/m**2) + real(r8), pointer :: eflx_vent_wasteheat_lun (:) ! lun sensible heat flux from ventilation source of urban waste heat (W/m**2) real(r8), pointer :: eflx_heat_from_ac_lun (:) ! lun sensible heat flux to be put back into canyon due to removal by AC (W/m**2) real(r8), pointer :: eflx_building_lun (:) ! lun building heat flux from change in interior building air temperature (W/m**2) real(r8), pointer :: eflx_urban_ac_lun (:) ! 
lun urban air conditioning flux (W/m**2) @@ -155,7 +157,7 @@ subroutine Init(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_buildtemp SHR_ASSERT_ALL_FL((ubound(t_grnd_col) == (/bounds%endc/)), sourcefile, __LINE__) call this%InitAllocate ( bounds ) - call this%InitHistory ( bounds, is_simple_buildtemp ) + call this%InitHistory ( bounds, is_simple_buildtemp, is_prog_buildtemp ) call this%InitCold ( bounds, t_grnd_col, is_simple_buildtemp, is_prog_buildtemp ) end subroutine Init @@ -226,6 +228,7 @@ subroutine InitAllocate(this, bounds) allocate( this%eflx_urban_ac_col (begc:endc)) ; this%eflx_urban_ac_col (:) = nan allocate( this%eflx_urban_heat_col (begc:endc)) ; this%eflx_urban_heat_col (:) = nan allocate( this%eflx_wasteheat_patch (begp:endp)) ; this%eflx_wasteheat_patch (:) = nan + allocate( this%eflx_vent_wasteheat_patch (begp:endp)) ; this%eflx_vent_wasteheat_patch(:) = nan allocate( this%eflx_traffic_patch (begp:endp)) ; this%eflx_traffic_patch (:) = nan allocate( this%eflx_heat_from_ac_patch (begp:endp)) ; this%eflx_heat_from_ac_patch (:) = nan allocate( this%eflx_heat_from_ac_lun (begl:endl)) ; this%eflx_heat_from_ac_lun (:) = nan @@ -234,6 +237,7 @@ subroutine InitAllocate(this, bounds) allocate( this%eflx_urban_heat_lun (begl:endl)) ; this%eflx_urban_heat_lun (:) = nan allocate( this%eflx_traffic_lun (begl:endl)) ; this%eflx_traffic_lun (:) = nan allocate( this%eflx_wasteheat_lun (begl:endl)) ; this%eflx_wasteheat_lun (:) = nan + allocate( this%eflx_vent_wasteheat_lun (begl:endl)) ; this%eflx_vent_wasteheat_lun (:) = nan allocate( this%eflx_anthro_patch (begp:endp)) ; this%eflx_anthro_patch (:) = nan allocate( this%dgnetdT_patch (begp:endp)) ; this%dgnetdT_patch (:) = nan @@ -276,7 +280,7 @@ subroutine InitAllocate(this, bounds) end subroutine InitAllocate !------------------------------------------------------------------------ - subroutine InitHistory(this, bounds, is_simple_buildtemp) + subroutine InitHistory(this, bounds, is_simple_buildtemp, is_prog_buildtemp) ! ! !DESCRIPTION: ! Setup fields that can be output to history files @@ -293,6 +297,7 @@ subroutine InitHistory(this, bounds, is_simple_buildtemp) class(energyflux_type) :: this type(bounds_type), intent(in) :: bounds logical , intent(in) :: is_simple_buildtemp ! If using simple building temp method + logical , intent(in) :: is_prog_buildtemp ! If using prognostic building temp method ! ! 
!LOCAL VARIABLES: integer :: begp, endp @@ -617,6 +622,13 @@ subroutine InitHistory(this, bounds, is_simple_buildtemp) avgflag='A', long_name='sensible heat flux from heating/cooling sources of urban waste heat', & ptr_patch=this%eflx_wasteheat_patch, set_nourb=0._r8, c2l_scale_type='urbanf') + if ( is_prog_buildtemp )then + this%eflx_vent_wasteheat_patch(begp:endp) = spval + call hist_addfld1d (fname='VENT_WASTEHEAT', units='W/m^2', & + avgflag='A', long_name='sensible heat flux from ventilation source of urban waste heat', & + ptr_patch=this%eflx_vent_wasteheat_patch, set_nourb=0._r8, c2l_scale_type='urbanf') + end if + this%eflx_heat_from_ac_patch(begp:endp) = spval call hist_addfld1d (fname='HEAT_FROM_AC', units='W/m^2', & avgflag='A', long_name='sensible heat flux put into canyon due to heat removed from air conditioning', & @@ -694,7 +706,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build use landunit_varcon , only : istwet, istsoil, istdlak use column_varcon , only : icol_road_imperv, icol_roof, icol_sunwall use column_varcon , only : icol_shadewall, icol_road_perv - use clm_varctl , only : iulog, use_vancouver, use_mexicocity + use clm_varctl , only : use_vancouver, use_mexicocity implicit none ! ! !ARGUMENTS: @@ -751,6 +763,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build if (.not. lun%urbpoi(l)) then this%eflx_traffic_lun(l) = spval this%eflx_wasteheat_lun(l) = spval + this%eflx_vent_wasteheat_lun(l) = spval if ( is_prog_buildtemp )then this%eflx_building_lun(l) = 0._r8 this%eflx_urban_ac_lun(l) = 0._r8 @@ -758,6 +771,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build end if this%eflx_wasteheat_patch(p) = 0._r8 + this%eflx_vent_wasteheat_patch(p) = 0._r8 this%eflx_heat_from_ac_patch(p) = 0._r8 this%eflx_traffic_patch(p) = 0._r8 if ( is_simple_buildtemp) & @@ -767,6 +781,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build this%eflx_building_lun(l) = 0._r8 this%eflx_urban_ac_lun(l) = 0._r8 this%eflx_urban_heat_lun(l) = 0._r8 + this%eflx_vent_wasteheat_lun(l) = 0._r8 end if end if end do @@ -861,6 +876,16 @@ subroutine Restart(this, bounds, ncid, flag, is_simple_buildtemp, is_prog_buildt else this%eflx_urban_heat_lun = 0.0_r8 end if + call restartvar(ncid=ncid, flag=flag, varname='EFLX_VENT_WASTEHEAT', xtype=ncd_double, & + dim1name='landunit', & + long_name='urban wasteheat from ventilation', units='watt/m^2', & + interpinic_flag='interp', readvar=readvar, data=this%eflx_vent_wasteheat_lun) + if (flag=='read' .and. .not. readvar) then + if (masterproc) write(iulog,*) "can't find EFLX_VENT_WASTEHEAT in initial file..." + if (masterproc) write(iulog,*) "Initialize EFLX_VENT_WASTEHEAT to zero" + this%eflx_vent_wasteheat_lun(bounds%begl:bounds%endl) = 0._r8 + end if + else if ( is_simple_buildtemp )then call restartvar(ncid=ncid, flag=flag, varname='URBAN_AC', xtype=ncd_double, & dim1name='column', & @@ -972,7 +997,6 @@ subroutine UpdateAccVars (this, bounds) use shr_const_mod , only : SHR_CONST_CDAY, SHR_CONST_TKFRZ use clm_time_manager , only : get_step_size, get_nstep, is_end_curr_day, get_curr_date use accumulMod , only : update_accum_field, extract_accum_field, accumResetVal - use clm_varctl , only : iulog use abortutils , only : endrun ! ! 
!ARGUMENTS:
diff --git a/src/biogeophys/SoilFluxesMod.F90 b/src/biogeophys/SoilFluxesMod.F90
index bb88042797..506bf32b5c 100644
--- a/src/biogeophys/SoilFluxesMod.F90
+++ b/src/biogeophys/SoilFluxesMod.F90
@@ -115,6 +115,7 @@ subroutine SoilFluxes (bounds, num_urbanl, filter_urbanl, &
 htvp => energyflux_inst%htvp_col , & ! Input: [real(r8) (:) ] latent heat of vapor of water (or sublimation) [j/kg]
 eflx_building_heat_errsoi=> energyflux_inst%eflx_building_heat_errsoi_col , & ! Input: [real(r8) (:)] heat flux to interior surface of walls and roof for errsoi check (W m-2)
 eflx_wasteheat_patch => energyflux_inst%eflx_wasteheat_patch , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2)
+ eflx_vent_wasteheat_patch => energyflux_inst%eflx_vent_wasteheat_patch, & ! Input: [real(r8) (:)] sensible heat flux from ventilation source of urban waste heat (W/m**2)
 eflx_heat_from_ac_patch => energyflux_inst%eflx_heat_from_ac_patch , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2)
 eflx_traffic_patch => energyflux_inst%eflx_traffic_patch , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2)
 dlrad => energyflux_inst%dlrad_patch , & ! Input: [real(r8) (:) ] downward longwave radiation below the canopy [W/m2]
@@ -278,7 +279,8 @@ subroutine SoilFluxes (bounds, num_urbanl, filter_urbanl, &
 eflx_soil_grnd(p) = sabg(p) + dlrad(p) &
 - eflx_lwrad_net(p) - eflx_lwrad_del(p) &
 - (eflx_sh_grnd(p) + qflx_evap_soi(p)*htvp(c) + qflx_tran_veg(p)*hvap) &
- + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p)
+ + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) &
+ + eflx_vent_wasteheat_patch(p)
 eflx_soil_grnd_u(p) = eflx_soil_grnd(p)
 end if
diff --git a/src/biogeophys/SoilTemperatureMod.F90 b/src/biogeophys/SoilTemperatureMod.F90
index ba4432cba2..3689287df2 100644
--- a/src/biogeophys/SoilTemperatureMod.F90
+++ b/src/biogeophys/SoilTemperatureMod.F90
@@ -1434,7 +1434,7 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, &
 use clm_varcon , only : sb, hvap
 use column_varcon , only : icol_road_perv, icol_road_imperv
 use clm_varpar , only : nlevsno, max_patch_per_col
- use UrbanParamsType, only : IsSimpleBuildTemp
+ use UrbanParamsType, only : IsSimpleBuildTemp, IsProgBuildTemp
 !
 ! !ARGUMENTS:
 implicit none
@@ -1506,6 +1506,7 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, &
 dlrad => energyflux_inst%dlrad_patch , & ! Input: [real(r8) (:) ] downward longwave radiation below the canopy [W/m2]
 eflx_traffic => energyflux_inst%eflx_traffic_lun , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2)
 eflx_wasteheat => energyflux_inst%eflx_wasteheat_lun , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2)
+ eflx_vent_wasteheat => energyflux_inst%eflx_vent_wasteheat_lun , & ! Input: [real(r8) (:) ] sensible heat flux from ventilation source of urban waste heat (W/m**2)
 eflx_heat_from_ac => energyflux_inst%eflx_heat_from_ac_lun , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2)
 eflx_sh_snow => energyflux_inst%eflx_sh_snow_patch , & ! Input: [real(r8) (:) ] sensible heat flux from snow (W/m**2) [+ to atm]
 eflx_sh_soil => energyflux_inst%eflx_sh_soil_patch , & ! 
Input: [real(r8) (:) ] sensible heat flux from soil (W/m**2) [+ to atm] @@ -1513,6 +1514,7 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, & eflx_sh_grnd => energyflux_inst%eflx_sh_grnd_patch , & ! Input: [real(r8) (:) ] sensible heat flux from ground (W/m**2) [+ to atm] eflx_lwrad_net => energyflux_inst%eflx_lwrad_net_patch , & ! Input: [real(r8) (:) ] net infrared (longwave) rad (W/m**2) [+ = to atm] eflx_wasteheat_patch => energyflux_inst%eflx_wasteheat_patch , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2) + eflx_vent_wasteheat_patch => energyflux_inst%eflx_vent_wasteheat_patch , & ! Input:[real(r8) (:) ] sensible heat flux from ventilation source of urban waste heat (W/m**2) eflx_heat_from_ac_patch => energyflux_inst%eflx_heat_from_ac_patch , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2) eflx_traffic_patch => energyflux_inst%eflx_traffic_patch , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2) eflx_anthro => energyflux_inst%eflx_anthro_patch , & ! Input: [real(r8) (:) ] total anthropogenic heat flux (W/m**2) @@ -1581,11 +1583,19 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, & ! All wasteheat and traffic flux goes into canyon floor if (col%itype(c) == icol_road_perv .or. col%itype(c) == icol_road_imperv) then + ! Note that we divide the following landunit variables by 1-wtlunit_roof which + ! essentially converts the flux from W/m2 of urban area to W/m2 of canyon floor area eflx_wasteheat_patch(p) = eflx_wasteheat(l)/(1._r8-lun%wtlunit_roof(l)) + if ( IsSimpleBuildTemp() ) then + eflx_vent_wasteheat_patch(p) = 0._r8 + else if ( IsProgBuildTemp() ) then + eflx_vent_wasteheat_patch(p) = eflx_vent_wasteheat(l)/(1._r8-lun%wtlunit_roof(l)) + end if eflx_heat_from_ac_patch(p) = eflx_heat_from_ac(l)/(1._r8-lun%wtlunit_roof(l)) eflx_traffic_patch(p) = eflx_traffic(l)/(1._r8-lun%wtlunit_roof(l)) else eflx_wasteheat_patch(p) = 0._r8 + eflx_vent_wasteheat_patch(p) = 0._r8 eflx_heat_from_ac_patch(p) = 0._r8 eflx_traffic_patch(p) = 0._r8 end if @@ -1594,7 +1604,8 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, & eflx_gnet(p) = sabg(p) + dlrad(p) & - eflx_lwrad_net(p) & - (eflx_sh_grnd(p) + qflx_evap_soi(p)*htvp(c) + qflx_tran_veg(p)*hvap) & - + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) + + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) & + + eflx_vent_wasteheat_patch(p) if ( IsSimpleBuildTemp() ) then eflx_anthro(p) = eflx_wasteheat_patch(p) + eflx_traffic_patch(p) end if diff --git a/src/biogeophys/UrbBuildTempOleson2015Mod.F90 b/src/biogeophys/UrbBuildTempOleson2015Mod.F90 index 0ace2868d6..f6ffdffcea 100644 --- a/src/biogeophys/UrbBuildTempOleson2015Mod.F90 +++ b/src/biogeophys/UrbBuildTempOleson2015Mod.F90 @@ -329,7 +329,8 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec, eflx_building => energyflux_inst%eflx_building_lun , & ! Output: [real(r8) (:)] building heat flux from change in interior building air temperature (W/m**2) eflx_urban_ac => energyflux_inst%eflx_urban_ac_lun , & ! Output: [real(r8) (:)] urban air conditioning flux (W/m**2) - eflx_urban_heat => energyflux_inst%eflx_urban_heat_lun & ! Output: [real(r8) (:)] urban heating flux (W/m**2) + eflx_urban_heat => energyflux_inst%eflx_urban_heat_lun,& ! 
Output: [real(r8) (:)] urban heating flux (W/m**2) + eflx_vent_wasteheat => energyflux_inst%eflx_vent_wasteheat_lun & ! Output: [real(r8) (:)] wasteheat from ventilation (W/m**2) ) ! Get step size @@ -673,7 +674,7 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec, end if end do - ! Energy balance checks + ! Energy balance checks and wasteheat from ventilation do fl = 1,num_urbanl l = filter_urbanl(fl) if (urbpoi(l)) then @@ -899,6 +900,13 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec, write (iulog,*) 'clm model is stopping' call endrun(subgrid_index=l, subgrid_level=subgrid_level_landunit) end if + + ! Wasteheat from ventilation. We keep this as a separate wasteheat term because it is balanced by the + ! opposite flux inside the building. It is added as a flux to the canyon floor in SoilTemperatureMod. + ! Note that we multiply it here by wtlunit_roof which converts it from W/m2 of building area to W/m2 + ! of urban area. eflx_urban_ac and eflx_urban_heat are treated similarly below. + eflx_vent_wasteheat(l) = wtlunit_roof(l) * ( - ht_roof(l)*(vent_ach/3600._r8) & + * rho_dair(l) * cpair * (taf(l) - t_building(l)) ) end if end do From e8779c81454a489dddb234a0b0c44574bd8b21bf Mon Sep 17 00:00:00 2001 From: Keith Oleson Date: Thu, 28 Oct 2021 16:21:44 -0600 Subject: [PATCH 019/223] Change variable eflx_vent_wasteheat to eflx_ventilation everywhere --- src/biogeophys/BalanceCheckMod.F90 | 4 +-- src/biogeophys/EnergyFluxType.F90 | 34 ++++++++++---------- src/biogeophys/SoilFluxesMod.F90 | 4 +-- src/biogeophys/SoilTemperatureMod.F90 | 12 +++---- src/biogeophys/UrbBuildTempOleson2015Mod.F90 | 19 +++++------ 5 files changed, 37 insertions(+), 36 deletions(-) diff --git a/src/biogeophys/BalanceCheckMod.F90 b/src/biogeophys/BalanceCheckMod.F90 index f33e1b8fd0..07923dae55 100644 --- a/src/biogeophys/BalanceCheckMod.F90 +++ b/src/biogeophys/BalanceCheckMod.F90 @@ -560,7 +560,7 @@ subroutine BalanceCheck( bounds, & eflx_lh_tot => energyflux_inst%eflx_lh_tot_patch , & ! Input: [real(r8) (:) ] total latent heat flux (W/m**2) [+ to atm] eflx_soil_grnd => energyflux_inst%eflx_soil_grnd_patch , & ! Input: [real(r8) (:) ] soil heat flux (W/m**2) [+ = into soil] eflx_wasteheat_patch => energyflux_inst%eflx_wasteheat_patch , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2) - eflx_vent_wasteheat_patch => energyflux_inst%eflx_vent_wasteheat_patch, & ! Input: [real(r8) (:) ] sensible heat flux from ventilation source of urban waste heat (W/m**2) + eflx_ventilation_patch => energyflux_inst%eflx_ventilation_patch , & ! Input: [real(r8) (:) ] sensible heat flux from building ventilation (W/m**2) eflx_heat_from_ac_patch => energyflux_inst%eflx_heat_from_ac_patch , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2) eflx_traffic_patch => energyflux_inst%eflx_traffic_patch , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2) eflx_dynbal => energyflux_inst%eflx_dynbal_grc , & ! 
Input: [real(r8) (:) ] energy conversion flux due to dynamic land cover change(W/m**2) [+ to atm] @@ -912,7 +912,7 @@ subroutine BalanceCheck( bounds, & - eflx_lwrad_net(p) & - eflx_sh_tot(p) - eflx_lh_tot(p) - eflx_soil_grnd(p) & + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) & - + eflx_vent_wasteheat_patch(p) + + eflx_ventilation_patch(p) end if !TODO MV - move this calculation to a better place - does not belong in BalanceCheck netrad(p) = fsa(p) - eflx_lwrad_net(p) diff --git a/src/biogeophys/EnergyFluxType.F90 b/src/biogeophys/EnergyFluxType.F90 index 394b4decb5..685663b83d 100644 --- a/src/biogeophys/EnergyFluxType.F90 +++ b/src/biogeophys/EnergyFluxType.F90 @@ -63,11 +63,11 @@ module EnergyFluxType real(r8), pointer :: eflx_anthro_patch (:) ! patch total anthropogenic heat flux (W/m**2) real(r8), pointer :: eflx_traffic_patch (:) ! patch traffic sensible heat flux (W/m**2) real(r8), pointer :: eflx_wasteheat_patch (:) ! patch sensible heat flux from domestic heating/cooling sources of waste heat (W/m**2) - real(r8), pointer :: eflx_vent_wasteheat_patch (:) ! patch sensible heat flux from ventilation source of urban waste heat (W/m**2) + real(r8), pointer :: eflx_ventilation_patch (:) ! patch sensible heat flux from building ventilation (W/m**2) real(r8), pointer :: eflx_heat_from_ac_patch (:) ! patch sensible heat flux put back into canyon due to removal by AC (W/m**2) real(r8), pointer :: eflx_traffic_lun (:) ! lun traffic sensible heat flux (W/m**2) real(r8), pointer :: eflx_wasteheat_lun (:) ! lun sensible heat flux from domestic heating/cooling sources of waste heat (W/m**2) - real(r8), pointer :: eflx_vent_wasteheat_lun (:) ! lun sensible heat flux from ventilation source of urban waste heat (W/m**2) + real(r8), pointer :: eflx_ventilation_lun (:) ! lun sensible heat flux from building ventilation (W/m**2) real(r8), pointer :: eflx_heat_from_ac_lun (:) ! lun sensible heat flux to be put back into canyon due to removal by AC (W/m**2) real(r8), pointer :: eflx_building_lun (:) ! lun building heat flux from change in interior building air temperature (W/m**2) real(r8), pointer :: eflx_urban_ac_lun (:) ! 
lun urban air conditioning flux (W/m**2) @@ -228,7 +228,7 @@ subroutine InitAllocate(this, bounds) allocate( this%eflx_urban_ac_col (begc:endc)) ; this%eflx_urban_ac_col (:) = nan allocate( this%eflx_urban_heat_col (begc:endc)) ; this%eflx_urban_heat_col (:) = nan allocate( this%eflx_wasteheat_patch (begp:endp)) ; this%eflx_wasteheat_patch (:) = nan - allocate( this%eflx_vent_wasteheat_patch (begp:endp)) ; this%eflx_vent_wasteheat_patch(:) = nan + allocate( this%eflx_ventilation_patch (begp:endp)) ; this%eflx_ventilation_patch (:) = nan allocate( this%eflx_traffic_patch (begp:endp)) ; this%eflx_traffic_patch (:) = nan allocate( this%eflx_heat_from_ac_patch (begp:endp)) ; this%eflx_heat_from_ac_patch (:) = nan allocate( this%eflx_heat_from_ac_lun (begl:endl)) ; this%eflx_heat_from_ac_lun (:) = nan @@ -237,7 +237,7 @@ subroutine InitAllocate(this, bounds) allocate( this%eflx_urban_heat_lun (begl:endl)) ; this%eflx_urban_heat_lun (:) = nan allocate( this%eflx_traffic_lun (begl:endl)) ; this%eflx_traffic_lun (:) = nan allocate( this%eflx_wasteheat_lun (begl:endl)) ; this%eflx_wasteheat_lun (:) = nan - allocate( this%eflx_vent_wasteheat_lun (begl:endl)) ; this%eflx_vent_wasteheat_lun (:) = nan + allocate( this%eflx_ventilation_lun (begl:endl)) ; this%eflx_ventilation_lun (:) = nan allocate( this%eflx_anthro_patch (begp:endp)) ; this%eflx_anthro_patch (:) = nan allocate( this%dgnetdT_patch (begp:endp)) ; this%dgnetdT_patch (:) = nan @@ -623,10 +623,10 @@ subroutine InitHistory(this, bounds, is_simple_buildtemp, is_prog_buildtemp) ptr_patch=this%eflx_wasteheat_patch, set_nourb=0._r8, c2l_scale_type='urbanf') if ( is_prog_buildtemp )then - this%eflx_vent_wasteheat_patch(begp:endp) = spval - call hist_addfld1d (fname='VENT_WASTEHEAT', units='W/m^2', & - avgflag='A', long_name='sensible heat flux from ventilation source of urban waste heat', & - ptr_patch=this%eflx_vent_wasteheat_patch, set_nourb=0._r8, c2l_scale_type='urbanf') + this%eflx_ventilation_patch(begp:endp) = spval + call hist_addfld1d (fname='VENTILATION', units='W/m^2', & + avgflag='A', long_name='sensible heat flux from building ventilation', & + ptr_patch=this%eflx_ventilation_patch, set_nourb=0._r8, c2l_scale_type='urbanf') end if this%eflx_heat_from_ac_patch(begp:endp) = spval @@ -763,7 +763,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build if (.not. 
lun%urbpoi(l)) then
 this%eflx_traffic_lun(l) = spval
 this%eflx_wasteheat_lun(l) = spval
- this%eflx_vent_wasteheat_lun(l) = spval
+ this%eflx_ventilation_lun(l) = spval
 if ( is_prog_buildtemp )then
 this%eflx_building_lun(l) = 0._r8
 this%eflx_urban_ac_lun(l) = 0._r8
@@ -771,7 +771,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build
 end if
 this%eflx_wasteheat_patch(p) = 0._r8
- this%eflx_vent_wasteheat_patch(p) = 0._r8
+ this%eflx_ventilation_patch(p) = 0._r8
 this%eflx_heat_from_ac_patch(p) = 0._r8
 this%eflx_traffic_patch(p) = 0._r8
 if ( is_simple_buildtemp) &
@@ -781,7 +781,7 @@ subroutine InitCold(this, bounds, t_grnd_col, is_simple_buildtemp, is_prog_build
 this%eflx_building_lun(l) = 0._r8
 this%eflx_urban_ac_lun(l) = 0._r8
 this%eflx_urban_heat_lun(l) = 0._r8
- this%eflx_vent_wasteheat_lun(l) = 0._r8
+ this%eflx_ventilation_lun(l)= 0._r8
 end if
 end if
 end do
@@ -876,14 +876,14 @@ subroutine Restart(this, bounds, ncid, flag, is_simple_buildtemp, is_prog_buildt
 else
 this%eflx_urban_heat_lun = 0.0_r8
 end if
- call restartvar(ncid=ncid, flag=flag, varname='EFLX_VENT_WASTEHEAT', xtype=ncd_double, &
+ call restartvar(ncid=ncid, flag=flag, varname='EFLX_VENTILATION', xtype=ncd_double, &
 dim1name='landunit', &
- long_name='urban wasteheat from ventilation', units='watt/m^2', &
- interpinic_flag='interp', readvar=readvar, data=this%eflx_vent_wasteheat_lun)
+ long_name='sensible heat flux from building ventilation', units='watt/m^2', &
+ interpinic_flag='interp', readvar=readvar, data=this%eflx_ventilation_lun)
 if (flag=='read' .and. .not. readvar) then
- if (masterproc) write(iulog,*) "can't find EFLX_VENT_WASTEHEAT in initial file..."
- if (masterproc) write(iulog,*) "Initialize EFLX_VENT_WASTEHEAT to zero"
- this%eflx_vent_wasteheat_lun(bounds%begl:bounds%endl) = 0._r8
+ if (masterproc) write(iulog,*) "can't find EFLX_VENTILATION in initial file..."
+ if (masterproc) write(iulog,*) "Initialize EFLX_VENTILATION to zero"
+ this%eflx_ventilation_lun(bounds%begl:bounds%endl) = 0._r8
 end if
 else
 if ( is_simple_buildtemp )then
diff --git a/src/biogeophys/SoilFluxesMod.F90 b/src/biogeophys/SoilFluxesMod.F90
index 506bf32b5c..5f4030c6e1 100644
--- a/src/biogeophys/SoilFluxesMod.F90
+++ b/src/biogeophys/SoilFluxesMod.F90
@@ -115,7 +115,7 @@ subroutine SoilFluxes (bounds, num_urbanl, filter_urbanl, &
 htvp => energyflux_inst%htvp_col , & ! Input: [real(r8) (:) ] latent heat of vapor of water (or sublimation) [j/kg]
 eflx_building_heat_errsoi=> energyflux_inst%eflx_building_heat_errsoi_col , & ! Input: [real(r8) (:)] heat flux to interior surface of walls and roof for errsoi check (W m-2)
 eflx_wasteheat_patch => energyflux_inst%eflx_wasteheat_patch , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2)
- eflx_vent_wasteheat_patch => energyflux_inst%eflx_vent_wasteheat_patch, & ! Input: [real(r8) (:)] sensible heat flux from ventilation source of urban waste heat (W/m**2)
+ eflx_ventilation_patch => energyflux_inst%eflx_ventilation_patch , & ! Input: [real(r8) (:) ] sensible heat flux from building ventilation (W/m**2)
 eflx_heat_from_ac_patch => energyflux_inst%eflx_heat_from_ac_patch , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2)
 eflx_traffic_patch => energyflux_inst%eflx_traffic_patch , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2)
 dlrad => energyflux_inst%dlrad_patch , & !
Input: [real(r8) (:) ] downward longwave radiation below the canopy [W/m2] @@ -280,7 +280,7 @@ subroutine SoilFluxes (bounds, num_urbanl, filter_urbanl, & - eflx_lwrad_net(p) - eflx_lwrad_del(p) & - (eflx_sh_grnd(p) + qflx_evap_soi(p)*htvp(c) + qflx_tran_veg(p)*hvap) & + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) & - + eflx_vent_wasteheat_patch(p) + + eflx_ventilation_patch(p) eflx_soil_grnd_u(p) = eflx_soil_grnd(p) end if diff --git a/src/biogeophys/SoilTemperatureMod.F90 b/src/biogeophys/SoilTemperatureMod.F90 index 3689287df2..513413e8a9 100644 --- a/src/biogeophys/SoilTemperatureMod.F90 +++ b/src/biogeophys/SoilTemperatureMod.F90 @@ -1506,7 +1506,7 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, & dlrad => energyflux_inst%dlrad_patch , & ! Input: [real(r8) (:) ] downward longwave radiation blow the canopy [W/m2] eflx_traffic => energyflux_inst%eflx_traffic_lun , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2) eflx_wasteheat => energyflux_inst%eflx_wasteheat_lun , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2) - eflx_vent_wasteheat => energyflux_inst%eflx_vent_wasteheat_lun , & ! Input: [real(r8) (:) ] sensible heat flux from ventilation source of urban waste heat (W/m**2) + eflx_ventilation => energyflux_inst%eflx_ventilation_lun , & ! Input: [real(r8) (:) ] sensible heat flux from building ventilation (W/m**2) eflx_heat_from_ac => energyflux_inst%eflx_heat_from_ac_lun , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2) eflx_sh_snow => energyflux_inst%eflx_sh_snow_patch , & ! Input: [real(r8) (:) ] sensible heat flux from snow (W/m**2) [+ to atm] eflx_sh_soil => energyflux_inst%eflx_sh_soil_patch , & ! Input: [real(r8) (:) ] sensible heat flux from soil (W/m**2) [+ to atm] @@ -1514,7 +1514,7 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, & eflx_sh_grnd => energyflux_inst%eflx_sh_grnd_patch , & ! Input: [real(r8) (:) ] sensible heat flux from ground (W/m**2) [+ to atm] eflx_lwrad_net => energyflux_inst%eflx_lwrad_net_patch , & ! Input: [real(r8) (:) ] net infrared (longwave) rad (W/m**2) [+ = to atm] eflx_wasteheat_patch => energyflux_inst%eflx_wasteheat_patch , & ! Input: [real(r8) (:) ] sensible heat flux from urban heating/cooling sources of waste heat (W/m**2) - eflx_vent_wasteheat_patch => energyflux_inst%eflx_vent_wasteheat_patch , & ! Input:[real(r8) (:) ] sensible heat flux from ventilation source of urban waste heat (W/m**2) + eflx_ventilation_patch => energyflux_inst%eflx_ventilation_patch , & ! Input: [real(r8) (:) ] sensible heat flux from building ventilation (W/m**2) eflx_heat_from_ac_patch => energyflux_inst%eflx_heat_from_ac_patch , & ! Input: [real(r8) (:) ] sensible heat flux put back into canyon due to removal by AC (W/m**2) eflx_traffic_patch => energyflux_inst%eflx_traffic_patch , & ! Input: [real(r8) (:) ] traffic sensible heat flux (W/m**2) eflx_anthro => energyflux_inst%eflx_anthro_patch , & ! Input: [real(r8) (:) ] total anthropogenic heat flux (W/m**2) @@ -1587,15 +1587,15 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, & ! 
essentially converts the flux from W/m2 of urban area to W/m2 of canyon floor area eflx_wasteheat_patch(p) = eflx_wasteheat(l)/(1._r8-lun%wtlunit_roof(l)) if ( IsSimpleBuildTemp() ) then - eflx_vent_wasteheat_patch(p) = 0._r8 + eflx_ventilation_patch(p) = 0._r8 else if ( IsProgBuildTemp() ) then - eflx_vent_wasteheat_patch(p) = eflx_vent_wasteheat(l)/(1._r8-lun%wtlunit_roof(l)) + eflx_ventilation_patch(p) = eflx_ventilation(l)/(1._r8-lun%wtlunit_roof(l)) end if eflx_heat_from_ac_patch(p) = eflx_heat_from_ac(l)/(1._r8-lun%wtlunit_roof(l)) eflx_traffic_patch(p) = eflx_traffic(l)/(1._r8-lun%wtlunit_roof(l)) else eflx_wasteheat_patch(p) = 0._r8 - eflx_vent_wasteheat_patch(p) = 0._r8 + eflx_ventilation_patch(p) = 0._r8 eflx_heat_from_ac_patch(p) = 0._r8 eflx_traffic_patch(p) = 0._r8 end if @@ -1605,7 +1605,7 @@ subroutine ComputeGroundHeatFluxAndDeriv(bounds, num_nolakec, filter_nolakec, & - eflx_lwrad_net(p) & - (eflx_sh_grnd(p) + qflx_evap_soi(p)*htvp(c) + qflx_tran_veg(p)*hvap) & + eflx_wasteheat_patch(p) + eflx_heat_from_ac_patch(p) + eflx_traffic_patch(p) & - + eflx_vent_wasteheat_patch(p) + + eflx_ventilation_patch(p) if ( IsSimpleBuildTemp() ) then eflx_anthro(p) = eflx_wasteheat_patch(p) + eflx_traffic_patch(p) end if diff --git a/src/biogeophys/UrbBuildTempOleson2015Mod.F90 b/src/biogeophys/UrbBuildTempOleson2015Mod.F90 index f6ffdffcea..0bf7b38204 100644 --- a/src/biogeophys/UrbBuildTempOleson2015Mod.F90 +++ b/src/biogeophys/UrbBuildTempOleson2015Mod.F90 @@ -330,7 +330,7 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec, eflx_building => energyflux_inst%eflx_building_lun , & ! Output: [real(r8) (:)] building heat flux from change in interior building air temperature (W/m**2) eflx_urban_ac => energyflux_inst%eflx_urban_ac_lun , & ! Output: [real(r8) (:)] urban air conditioning flux (W/m**2) eflx_urban_heat => energyflux_inst%eflx_urban_heat_lun,& ! Output: [real(r8) (:)] urban heating flux (W/m**2) - eflx_vent_wasteheat => energyflux_inst%eflx_vent_wasteheat_lun & ! Output: [real(r8) (:)] wasteheat from ventilation (W/m**2) + eflx_ventilation => energyflux_inst%eflx_ventilation_lun & ! Output: [real(r8) (:)] sensible heat flux from building ventilation (W/m**2) ) ! Get step size @@ -674,7 +674,7 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec, end if end do - ! Energy balance checks and wasteheat from ventilation + ! Energy balance checks do fl = 1,num_urbanl l = filter_urbanl(fl) if (urbpoi(l)) then @@ -900,13 +900,6 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec, write (iulog,*) 'clm model is stopping' call endrun(subgrid_index=l, subgrid_level=subgrid_level_landunit) end if - - ! Wasteheat from ventilation. We keep this as a separate wasteheat term because it is balanced by the - ! opposite flux inside the building. It is added as a flux to the canyon floor in SoilTemperatureMod. - ! Note that we multiply it here by wtlunit_roof which converts it from W/m2 of building area to W/m2 - ! of urban area. eflx_urban_ac and eflx_urban_heat are treated similarly below. - eflx_vent_wasteheat(l) = wtlunit_roof(l) * ( - ht_roof(l)*(vent_ach/3600._r8) & - * rho_dair(l) * cpair * (taf(l) - t_building(l)) ) end if end do @@ -938,6 +931,14 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec, eflx_urban_heat(l) = 0._r8 end if eflx_building(l) = wtlunit_roof(l) * (ht_roof(l) * rho_dair(l)*cpair/dtime) * (t_building(l) - t_building_bef(l)) + + ! 
Sensible heat flux from ventilation. It is added as a flux to the canyon floor in SoilTemperatureMod. + ! Note that we multiply it here by wtlunit_roof which converts it from W/m2 of building area to W/m2 + ! of urban area. eflx_urban_ac and eflx_urban_heat are treated similarly above. This flux is balanced + ! by an equal and opposite flux into/out of the building and so has a net effect of zero on the energy balance + ! of the urban landunit. + eflx_ventilation(l) = wtlunit_roof(l) * ( - ht_roof(l)*(vent_ach/3600._r8) & + * rho_dair(l) * cpair * (taf(l) - t_building(l)) ) end if end do From 74360bfea5af4ae8a020ffa9a926f7e472f7d8f5 Mon Sep 17 00:00:00 2001 From: Keith Oleson Date: Fri, 29 Oct 2021 08:58:33 -0600 Subject: [PATCH 020/223] Put eflx_ventilation calculation back to original position --- src/biogeophys/UrbBuildTempOleson2015Mod.F90 | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/biogeophys/UrbBuildTempOleson2015Mod.F90 b/src/biogeophys/UrbBuildTempOleson2015Mod.F90 index 0bf7b38204..bf8b68c7eb 100644 --- a/src/biogeophys/UrbBuildTempOleson2015Mod.F90 +++ b/src/biogeophys/UrbBuildTempOleson2015Mod.F90 @@ -900,6 +900,14 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec, write (iulog,*) 'clm model is stopping' call endrun(subgrid_index=l, subgrid_level=subgrid_level_landunit) end if + + ! Sensible heat flux from ventilation. It is added as a flux to the canyon floor in SoilTemperatureMod. + ! Note that we multiply it here by wtlunit_roof which converts it from W/m2 of building area to W/m2 + ! of urban area. eflx_urban_ac and eflx_urban_heat are treated similarly below. This flux is balanced + ! by an equal and opposite flux into/out of the building and so has a net effect of zero on the energy balance + ! of the urban landunit. + eflx_ventilation(l) = wtlunit_roof(l) * ( - ht_roof(l)*(vent_ach/3600._r8) & + * rho_dair(l) * cpair * (taf(l) - t_building(l)) ) end if end do @@ -931,14 +939,6 @@ subroutine BuildingTemperature (bounds, num_urbanl, filter_urbanl, num_nolakec, eflx_urban_heat(l) = 0._r8 end if eflx_building(l) = wtlunit_roof(l) * (ht_roof(l) * rho_dair(l)*cpair/dtime) * (t_building(l) - t_building_bef(l)) - - ! Sensible heat flux from ventilation. It is added as a flux to the canyon floor in SoilTemperatureMod. - ! Note that we multiply it here by wtlunit_roof which converts it from W/m2 of building area to W/m2 - ! of urban area. eflx_urban_ac and eflx_urban_heat are treated similarly above. This flux is balanced - ! by an equal and opposite flux into/out of the building and so has a net effect of zero on the energy balance - ! of the urban landunit. 
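For readers tracing the energy accounting, the ventilation term computed above can be written out compactly; this is simply the Fortran expression from the patch transcribed into LaTeX, with T_af the canyon air temperature (taf), T_B the interior building air temperature (t_building), and ACH the ventilation rate in air changes per hour (vent_ach):

\[
Q_{\mathrm{vent}} \;=\; w_{\mathrm{roof}}\left[\,-\,h_{\mathrm{roof}}\,\frac{\mathrm{ACH}}{3600}\,\rho_{\mathrm{air}}\,c_{p}\,\bigl(T_{\mathrm{af}} - T_{B}\bigr)\right]
\]

The product of h_roof (m) and ACH/3600 (s^-1) is a ventilation volume flux per unit building area (m s^-1), so multiplying by rho_air * c_p * (temperature difference) yields W/m2 of building area, and the factor w_roof = wtlunit_roof rescales that to W/m2 of urban landunit area, as the comments state. The leading minus sign makes Q_vent positive, i.e. a heat source to the canyon floor, when the building interior is warmer than the canyon air.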
- eflx_ventilation(l) = wtlunit_roof(l) * ( - ht_roof(l)*(vent_ach/3600._r8) & - * rho_dair(l) * cpair * (taf(l) - t_building(l)) ) end if end do From 92fbaada788f59b7fa5bbc3e2d8addd0bbd840c9 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Wed, 8 Dec 2021 14:03:48 -0700 Subject: [PATCH 021/223] updates to facilitate user mods and config file --- python/ctsm/site_and_regional/base_case.py | 7 +- .../ctsm/site_and_regional/regional_case.py | 6 +- .../site_and_regional/single_point_case.py | 76 +- python/ctsm/subset_data.py | 769 ++++++++---------- tools/site_and_regional/default_data.cfg | 28 + tools/site_and_regional/subset_data | 4 - 6 files changed, 422 insertions(+), 468 deletions(-) create mode 100644 tools/site_and_regional/default_data.cfg diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 1a17762281..d30ff1346a 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -12,9 +12,7 @@ class BaseCase: """ Parent class to SinglePointCase and RegionalCase - ... - Attributes ---------- create_domain : bool @@ -25,12 +23,10 @@ class BaseCase: flag for creating landuse file create_datm : bool flag for creating DATM files - Methods ------- create_1d_coord(filename, lon_varname , lat_varname,x_dim , y_dim ) create 1d coordinate variables to enable sel() method - add_tag_to_filename(filename, tag) add a tag and timetag to a filename ending with [._]cYYMMDD.nc or [._]YYMMDD.nc @@ -136,7 +132,8 @@ def get_git_sha(): """ try: sha = ( - subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) + subprocess.check_output( + ["git", "rev-parse", "--short", "HEAD"]) .strip() .decode() ) diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 4b0d4fbb3c..de50e4a9e5 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -70,7 +70,8 @@ def create_surfdata_at_reg(self): print("Creating surface dataset file at region:", self.tag) # create 1d coordinate variables to enable sel() method filename = self.fsurf_in - f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + f2 = self.create_1d_coord( + filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") lat = f2["lat"] lon = f2["lon"] # subset longitude and latitude arrays @@ -93,7 +94,8 @@ def create_landuse_at_reg(self): # print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) print("Creating surface dataset file at region:", self.tag) # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + f2 = self.create_1d_coord( + self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") lat = f2["lat"] lon = f2["lon"] # subset longitude and latitude arrays diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 2f4949572a..bc77437499 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -8,9 +8,7 @@ class SinglePointCase(BaseCase): """ A case to encapsulate single point cases. - ... - Attributes ---------- plat : float @@ -19,13 +17,11 @@ class SinglePointCase(BaseCase): longitude site_name: str -- default = None Site name - Methods ------- create_tag create a tag for single point which is the site name or the "lon-lat" format if the site name does not exist. 
-
     create_domain_at_point
         Create domain file at a single point.
     create_landuse_at_point:
         Create landuse file at a single point.
@@ -118,7 +114,8 @@ def create_landuse_at_point(self):
         print("----------------------------------------------------------------------")
         print("Creating landuse file at ", self.plon, self.plat, ".")
         # create 1d coordinate variables to enable sel() method
-        f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
+        f2 = self.create_1d_coord(
+            self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
         # extract gridcell closest to plon/plat
         f3 = f2.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest")
@@ -131,7 +128,8 @@ def create_landuse_at_point(self):
         # revert expand dimensions of YEAR
         year = np.squeeze(np.asarray(f3["YEAR"]))
-        x = xr.DataArray(year, coords={"time": f3["time"]}, dims="time", name="YEAR")
+        x = xr.DataArray(
+            year, coords={"time": f3["time"]}, dims="time", name="YEAR")
         x.attrs["units"] = "unitless"
         x.attrs["long_name"] = "Year of PFT data"
         f3["YEAR"] = x
@@ -152,12 +150,17 @@ def create_surfdata_at_point(self):
         print("Creating surface dataset file at ", self.plon, self.plat, ".")
         # create 1d coordinate variables to enable sel() method
         filename = self.fsurf_in
-        f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
+        f2 = self.create_1d_coord(
+            filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
         # extract gridcell closest to plon/plat
         f3 = f2.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest")

         # expand dimensions
         f3 = f3.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True)
+        # update the plon and plat to match the surface data
+        self.plat = f3.coords['lsmlat'].values[0]
+        self.plon = f3.coords['lsmlon'].values[0]
+
         # modify surface data properties
         if self.overwrite_single_pft:
             f3["PCT_NAT_PFT"][:, :, :] = 0
@@ -243,20 +246,33 @@ def extract_datm_at(self, file_in, file_out):
         f2.close()
         f3.close()

-    def create_datm_at_point(self):
+    def write_shell_commands(self, file):
+        # writes out shell commands for single-point runs
+
+        file.write('# Change below line if you move the user mods')
+        file.write('\n' + './xmlchange CLM_USRDAT_DIR=' + self.out_dir + '\n')
+        file.write('\n' + "./xmlchange PTS_LON=" + str(self.plon) + '\n')
+        file.write('\n' + "./xmlchange PTS_LAT=" + str(self.plat) + '\n')
+        file.write('\n' + "./xmlchange MPILIB=mpi-serial" + '\n')
+        file.close()
+
+    def write_nl_commands(self, streamname, file):
+        line_mapalgo = streamname + ':mapalgo=none'
+        line_meshfile = streamname + ':meshfile=none'
+
+        file.write("\n" + line_meshfile + "\n")
+        file.write("\n" + line_mapalgo + "\n")
+
+    def create_datm_at_point(self, create_user_mods, datm_streams_file):
         print("----------------------------------------------------------------------")
         print("Creating DATM files at ", self.plon, self.plat, ".")
-        # -- specify subdirectory names and filename prefixes
-        solrdir = "Solar/"
-        precdir = "Precip/"
-        tpqwldir = "TPHWL/"
-        prectag = "clmforc.GSWP3.c2011.0.5x0.5.Prec."
-        solrtag = "clmforc.GSWP3.c2011.0.5x0.5.Solr."
-        tpqwtag = "clmforc.GSWP3.c2011.0.5x0.5.TPQWL."

         # -- create data files
         infile = []
         outfile = []
+        solarfiles = []
+        precfiles = []
+        tpqwfiles = []
         for y in range(self.datm_syr, self.datm_eyr + 1):
             ystr = str(y)
             for m in range(1, 13):
                 mstr = str(m)
                 if m < 10:
                     mstr = "0" + mstr

                 dtag = ystr + "-" + mstr

-                fsolar = self.dir_input_datm + solrdir + solrtag + dtag + ".nc"
-                fsolar2 = self.dir_output_datm + solrtag + self.tag + "."
+ dtag + ".nc" - fprecip = self.dir_input_datm + precdir + prectag + dtag + ".nc" - fprecip2 = ( - self.dir_output_datm + prectag + self.tag + "." + dtag + ".nc" - ) - ftpqw = self.dir_input_datm + tpqwldir + tpqwtag + dtag + ".nc" - ftpqw2 = self.dir_output_datm + tpqwtag + self.tag + "." + dtag + ".nc" + fsolar = os.path.join(self.dir_input_datm, self.dir_solar, self.tag_solar + dtag + ".nc") + fsolar2 = os.path.join(self.dir_output_datm, self.tag_solar + self.tag + "." + dtag + ".nc") + fprecip = os.path.join(self.dir_input_datm, self.dir_prec, self.tag_prec + dtag + ".nc") + fprecip2 = os.path.join(self.dir_output_datm, self.tag_prec + self.tag + "." + dtag + ".nc") + ftpqw = os.path.join(self.dir_input_datm, self.dir_tpqw, self.tag_tpqw + dtag + ".nc") + ftpqw2 = os.path.join(self.dir_output_datm, self.tag_tpqw + self.tag + "." + dtag + ".nc") infile += [fsolar, fprecip, ftpqw] outfile += [fsolar2, fprecip2, ftpqw2] + solarfiles.append(fsolar2) + precfiles.append(fprecip2) + tpqwfiles.append(ftpqw2) nm = len(infile) for n in range(nm): @@ -286,3 +303,16 @@ def create_datm_at_point(self): self.extract_datm_at(file_in, file_out) print("All DATM files are created in: " + self.dir_output_datm) + + if create_user_mods: + solarfile_line = self.name_solar + ':datafiles=' + ','.join(solarfiles) + precfile_line = self.name_prec + ':datafiles=' + ','.join(precfiles) + tpqwfile_line = self.name_tpqw + ':datafiles=' + ','.join(tpqwfiles) + + with open(datm_streams_file, 'a') as user_nl_file: + user_nl_file.write('\n' + solarfile_line + '\n') + self.write_nl_commands(self.name_solar, user_nl_file) + user_nl_file.write('\n' + precfile_line + '\n') + self.write_nl_commands(self.name_prec, user_nl_file) + user_nl_file.write('\n' + tpqwfile_line + '\n') + self.write_nl_commands(self.name_tpqw, user_nl_file) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 85b420b229..f1db4a7f65 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -3,71 +3,36 @@ |------------------------------------------------------------------| |--------------------- Instructions -----------------------------| |------------------------------------------------------------------| - Instructions for running on Cheyenne/Casper: - load the following into your local environment module load python ncar_pylib - ------------------------------------------------------------------- To see the available options for single point cases: ./subset_data.py point --help - To see the available options for regional cases: - ./subset_data.py reg --help + ./subset_data.py reg --help ------------------------------------------------------------------- - This script extracts domain files, surface dataset, and DATM files -at either a single point or a region using the global dataset. - -After creating a case using a global compset, run preview_namelist. -From the resulting lnd_in file in the run directory, find the name -of the domain file, and the surface data file. - -From the datm streams files (e.g. datm.streams.txt.CLMGSWP3v1.Precip) -find the name of the datm forcing data domain file and forcing files. -Use these file names as the sources for the single point/regional -files to be created (see below). - -After running this script, point to the new CLM domain and surface -dataset using the user_nl_clm file in the case directory. In addition, -copy the datm.streams files to the case directory, with the prefix -'user_', e.g. user_datm.streams.txt.CLMGSWP3v1.Precip. 
Change the
-information in the user_datm.streams* files to point to the single
-point datm data (domain and forcing files) created using this script.
-
-The domain file is not set via user_nl_clm, but requires changing
-LND_DOMAIN and ATM_DOMAIN (and their paths) in env_run.xml.
-
-Using single point forcing data requires specifying the nearest
-neighbor mapping algorithm for the datm streams (usually they are
-the first three in the list) in user_nl_datm: mapalgo = 'nn','nn','nn',
-..., where the '...' can still be 'bilinear', etc, depending on the
-other streams that are being used, e.g. aerosols, anomaly forcing,
-bias correction.
-
-The file env_mach_pes.xml should be modified to specify a single
-processor. The mpi-serial libraries should also be used, and can be
-set in env_build.xml by changing "MPILIB" to "mpi-serial" prior to
-setting up the case.
-
-The case for the single point simulation should have river routing
-and land ice models turned off (i.e. the compset should use stub
-models SROF and SGLC)
-
-By default, it only extracts surface dataset and for extracting other
+at either a single point or a region using a global dataset. Currently this
+script subsets the default surface, landuse, and DATM files, which are listed
+in the default_data.cfg file.
+
+To run a single-point or regional case using this data, you must update the
+variable(s) `fsurdat` and/or `flanduse_timeseries` in the user_nl_clm namelist
+file to be the full path to the subset files. This script will automatically
+create this file using the flag --create-user-mods.
+To use subset climate data, the namelist file user_nl_datm_streams must also
+be updated; this script will automatically create this file with
+--create-user-mods. This flag will also create the necessary single-point xml
+commands in the file shell_commands.
+
+To use the created user mods with a case, use --user-mods-dir PATH/TO/USER/MODS
+in the ./create_newcase call.
+
+By default, this script only extracts the surface dataset. For extracting other
 files, the appropriate flags should be used.
 -------------------------------------------------------------------
-To run the script for a single point:
-    ./subset_data.py point --help
-
-To run the script for a region:
-    ./subset_data.py reg --help
-
-To remove NPL from your environment on Cheyenne/Casper:
-    deactivate
--------------------------------------------------------------------
 """
 # TODO
 import os
 import sys
 import string
 import logging
 import subprocess
 import argparse
+import configparser

 import numpy as np
 import xarray as xr

 from datetime import date
 from getpass import getuser
 from logging.handlers import RotatingFileHandler
 from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter

+# Get the ctsm util tools and then the cime tools.
+_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..","..",'python')) +sys.path.insert(1, _CTSM_PYTHON) + +from ctsm import add_cime_to_path +from ctsm.path_utils import path_to_ctsm_root + from ctsm.site_and_regional.base_case import BaseCase from ctsm.site_and_regional.single_point_case import SinglePointCase from ctsm.site_and_regional.regional_case import RegionalCase +# -- Globals and Default Values --- +DEFAULTS_FILE = "default_data.cfg" myname = getuser() @@ -110,11 +85,12 @@ def get_parser(): parser.print_usage = parser.print_help subparsers = parser.add_subparsers( - help="Two possible ways to run this sript, either:", dest="run_type" - ) - pt_parser = subparsers.add_parser("point", help="Run script for a single point.") + help="Two possible ways to run this script, either:", dest="run_type") + pt_parser = subparsers.add_parser( + "point", help="Run script for a single point.") rg_parser = subparsers.add_parser("reg", help="Run script for a region.") + # First add arguments specific to a point or regional parser pt_parser.add_argument( "--lat", help="Single point latitude. [default: %(default)s]", @@ -140,124 +116,8 @@ def get_parser(): dest="site_name", required=False, type=str, - default="", - ) - pt_parser.add_argument( - "--create_domain", - help="Flag for creating CLM domain file at single point. [default: %(default)s]", - action="store", - dest="create_domain", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - pt_parser.add_argument( - "--create_surface", - help="Flag for creating surface data file at single point. [default: %(default)s]", - action="store", - dest="create_surfdata", - type=str2bool, - nargs="?", - const=True, - required=False, - default=True, - ) - pt_parser.add_argument( - "--create_landuse", - help="Flag for creating landuse data file at single point. [default: %(default)s]", - action="store", - dest="create_landuse", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - pt_parser.add_argument( - "--create_datm", - help="Flag for creating DATM forcing data at single point. [default: %(default)s]", - action="store", - dest="create_datm", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - pt_parser.add_argument( - "--datm_syr", - help="Start year for creating DATM forcing at single point. [default: %(default)s]", - action="store", - dest="datm_syr", - required=False, - type=int, - default=1901, - ) - pt_parser.add_argument( - "--datm_eyr", - help="End year for creating DATM forcing at single point. [default: %(default)s]", - action="store", - dest="datm_eyr", - required=False, - type=int, - default=2014, - ) - pt_parser.add_argument( - "--crop", - help="Create datasets using the extensive list of prognostic crop types. [default: %(default)s]", - action="store_true", - dest="crop_flag", - default=False, - ) - pt_parser.add_argument( - "--dompft", - help="Dominant PFT type . [default: %(default)s] ", - action="store", - dest="dom_pft", - type=int, - default=7, - ) - pt_parser.add_argument( - "--no-unisnow", - help="Turn off the flag for create uniform snowpack. [default: %(default)s]", - action="store_false", - dest="uni_snow", - default=True, - ) - pt_parser.add_argument( - "--no-overwrite_single_pft", - help="Turn off the flag for making the whole grid 100%% single PFT. 
[default: %(default)s]", - action="store_false", - dest="overwrite_single_pft", - default=True, - ) - pt_parser.add_argument( - "--zero_nonveg", - help="Set all non-vegetation landunits to zero. [default: %(default)s]", - action="store", - dest="zero_nonveg", - type=bool, - default=True, - ) - pt_parser.add_argument( - "--no_saturation_excess", - help="Turn off the flag for saturation excess. [default: %(default)s]", - action="store", - dest="no_saturation_excess", - type=bool, - default=True, - ) - pt_parser.add_argument( - "--outdir", - help="Output directory. [default: %(default)s]", - action="store", - dest="out_dir", - type=str, - default="/glade/scratch/" + myname + "/single_point/", + default="single_point", ) - rg_parser.add_argument( "--lat1", help="Region start latitude. [default: %(default)s]", @@ -301,93 +161,146 @@ def get_parser(): dest="reg_name", required=False, type=str, - default="", - ) - rg_parser.add_argument( - "--create_domain", - help="Flag for creating CLM domain file for a region. [default: %(default)s]", - action="store", - dest="create_domain", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - rg_parser.add_argument( - "--create_surface", - help="Flag for creating surface data file for a region. [default: %(default)s]", - action="store", - dest="create_surfdata", - type=str2bool, - nargs="?", - const=True, - required=False, - default=True, - ) - rg_parser.add_argument( - "--create_landuse", - help="Flag for creating landuse data file for a region. [default: %(default)s]", - action="store", - dest="create_landuse", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - rg_parser.add_argument( - "--create_datm", - help="Flag for creating DATM forcing data for a region. [default: %(default)s]", - action="store", - dest="create_datm", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - rg_parser.add_argument( - "--datm_syr", - help="Start year for creating DATM forcing for a region. [default: %(default)s]", - action="store", - dest="datm_syr", - required=False, - type=int, - default=1901, - ) - rg_parser.add_argument( - "--datm_eyr", - help="End year for creating DATM forcing for a region. [default: %(default)s]", - action="store", - dest="datm_eyr", - required=False, - type=int, - default=2014, - ) - rg_parser.add_argument( - "--crop", - help="Create datasets using the extensive list of prognostic crop types. [default: %(default)s]", - action="store_true", - dest="crop_flag", - default=False, - ) - rg_parser.add_argument( - "--dompft", - help="Dominant PFT type . [default: %(default)s] ", - action="store", - dest="dom_pft", - type=int, - default=7, - ) - rg_parser.add_argument( - "--outdir", - help="Output directory. [default: %(default)s]", - action="store", - dest="out_dir", - type=str, - default="/glade/scratch/" + myname + "/regional/", - ) + default="region", + ) + + # Now add arguments shared between pt_parser and rg_parser + for subparser in [pt_parser, rg_parser]: + subparser.add_argument( + "--create-domain", + help="Create CLM domain file at single point or region. [default: %(default)s]", + action="store", + dest="create_domain", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + subparser.add_argument( + "--create-surface", + help="Create surface data file at single point or region. 
[default: %(default)s]",
            action="store",
            dest="create_surfdata",
            type=str2bool,
            nargs="?",
            const=True,
            required=False,
            default=True,
        )
        subparser.add_argument(
            "--create-landuse",
            help="Create landuse data file at single point or region. [default: %(default)s]",
            action="store",
            dest="create_landuse",
            type=str2bool,
            nargs="?",
            const=True,
            required=False,
            default=False,
        )
        subparser.add_argument(
            "--create-datm",
            help="Create DATM forcing data at single point or region. [default: %(default)s]",
            action="store",
            dest="create_datm",
            type=str2bool,
            nargs="?",
            const=True,
            required=False,
            default=False,
        )
        subparser.add_argument(
            "--datm-syr",
            help="Start year for creating DATM forcing. [default: %(default)s]",
            action="store",
            dest="datm_syr",
            required=False,
            type=int,
            default=1901,
        )
        subparser.add_argument(
            "--datm-eyr",
            help="End year for creating DATM forcing. [default: %(default)s]",
            action="store",
            dest="datm_eyr",
            required=False,
            type=int,
            default=2014,
        )
        subparser.add_argument(
            "--crop",
            help="Create datasets using the extensive list of prognostic crop types. [default: %(default)s]",
            action="store_true",
            dest="crop_flag",
            default=False,
        )
        subparser.add_argument(
            "--dompft",
            help="Dominant PFT type. [default: %(default)s]",
            action="store",
            dest="dom_pft",
            type=int,
            default=7,
        )
        subparser.add_argument(
            "--no-unisnow",
            help="Turn off the flag for creating a uniform snowpack. [default: %(default)s]",
            action="store_false",
            dest="uni_snow",
            default=True,
        )
        subparser.add_argument(
            "--no-overwrite-single-pft",
            help="Turn off the flag for making the whole grid 100%% single PFT. [default: %(default)s]",
            action="store_false",
            dest="overwrite_single_pft",
            default=True,
        )
        subparser.add_argument(
            "--zero-nonveg",
            help="Set all non-vegetation landunits to zero. [default: %(default)s]",
            action="store",
            dest="zero_nonveg",
            type=str2bool,
            default=True,
        )
        subparser.add_argument(
            "--no-saturation-excess",
            help="Turn off the flag for saturation excess. [default: %(default)s]",
            action="store",
            dest="no_saturation_excess",
            type=str2bool,
            default=True,
        )
        subparser.add_argument(
            "--create-user-mods",
            help="Create a user mods directory for running CTSM. [default: %(default)s]",
            action="store",
            dest="create_user_mods",
            type=str2bool,
            nargs="?",
            const=True,
            default=False,
        )
        subparser.add_argument(
            "--user-mods-dir",
            help="Path to user mods directory. [default: %(default)s]",
            action="store",
            dest="user_mods_dir",
            type=str,
            default="/glade/scratch/" + myname + "/subset_data/user_mods",
        )
        subparser.add_argument(
            "--outdir",
            help="Output directory. [default: %(default)s]",
            action="store",
            dest="out_dir",
            type=str,
            default="/glade/scratch/" + myname + "/subset_data/",
        )

    newline = "\n"
    parser.epilog = f"""==================================={newline}{newline}{pt_parser.format_help()}{newline}{newline}{rg_parser.format_help()}"""
    return parser


def str2bool(v):
    """
    Function for converting different forms of
    command line boolean strings to boolean value.
    Args:
        v (str): String bool input
    Raises:
        argparse.ArgumentTypeError: If the argument is not an acceptable
        boolean string (such as yes or no ; true or false ; y or n ;
        t or f ; 0 or 1), since the string should be one of those values.
    Returns:
        bool: Boolean value corresponding to the input.
""" @@ -425,16 +335,12 @@ def plat_type(x): Function to define lat type for the parser and raise error if latitude is not between -90 and 90. - Args: x(str): latitude - Raises: Error when x (latitude) is not between -90 and 90. - Returns: x (float): latitude in float - """ x = float(x) if (x < -90) or (x > 90): @@ -449,13 +355,10 @@ def plon_type(x): Function to define lon type for the parser and convert negative longitudes and raise error if lon is not between -180 and 360. - Args: x (str): longitude - Raises: Error: when latitude is <-180 and >360. - Returns: x(float): converted longitude between 0 and 360 """ @@ -542,7 +445,8 @@ def setup_stdout(cls, also_log_to_stream=True): Setup logger for stdout """ stdout_logger = logging.getLogger("STDOUT") - sl = StreamToLogger(sys.stdout, stdout_logger, logging.INFO, also_log_to_stream) + sl = StreamToLogger(sys.stdout, stdout_logger, + logging.INFO, also_log_to_stream) sys.stdout = sl @classmethod @@ -573,6 +477,34 @@ def flush(self): def main(): + defaults = configparser.ConfigParser() + defaults.read(DEFAULTS_FILE) + + # Parse defaults + dir_inputdata = defaults.get('main', 'clmforcingindir') + dir_input_datm = defaults.get('datm_gswp3', 'dir') + domain_file = defaults.get('domain', 'file') + fdomain_in = os.path.join(dir_inputdata, domain_file) + fdatmdomain_in = os.path.join(defaults.get('datm_gswp3', 'dir'), defaults.get('datm_gswp3', 'domain')) + fsurfdat_78pft = os.path.join(dir_inputdata, defaults.get('surfdat', 'dir'), + defaults.get('surfdat', 'surfdat_78pft')) + fsurfdat_16pft = os.path.join(dir_inputdata, defaults.get('surfdat', 'dir'), + defaults.get('surfdat', 'surfdat_16pft')) + landuse_78pft = os.path.join(dir_inputdata, defaults.get('landuse', 'dir'), + defaults.get('landuse', 'landuse_78pft')) + landuse_16pft = os.path.join(dir_inputdata, defaults.get('landuse', 'dir'), + defaults.get('landuse', 'landuse_16pft')) + + datm_solardir = defaults.get('datm_gswp3', 'solardir') + datm_precdir = defaults.get('datm_gswp3', 'precdir') + datm_tpqwdir = defaults.get('datm_gswp3', 'tpqwdir') + datm_solartag = defaults.get('datm_gswp3', 'solartag') + datm_prectag = defaults.get('datm_gswp3', 'prectag') + datm_tpqwtag = defaults.get('datm_gswp3', 'tpqwtag') + datm_solarname = defaults.get('datm_gswp3', 'solarname') + datm_precname = defaults.get('datm_gswp3', 'precname') + datm_tpqwname = defaults.get('datm_gswp3', 'tpqwname') + args = get_parser().parse_args() # --------------------------------- # @@ -593,40 +525,89 @@ def main(): # --------------------------------- # + # print help and exit when no option is chosen + if (args.run_type != "point" and args.run_type != "reg"): + get_parser().print_help() + quit() + + # -- Specify which types of data to subset + create_domain = args.create_domain + create_surfdata = args.create_surfdata + create_landuse = args.create_landuse + create_datm = args.create_datm + + crop_flag = args.crop_flag + if crop_flag: + num_pft = "78" + fsurf_in = fsurfdat_78pft + fluse_in = landuse_78pft + else: + num_pft = "16" + fsurf_in = fsurfdat_16pft + fluse_in = landuse_16pft + + print("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) + + # -- Start and ending years for DATM data + datm_syr = args.datm_syr + datm_eyr = args.datm_eyr + + # -- Modify landunit structure + overwrite_single_pft = args.overwrite_single_pft + dominant_pft = args.dom_pft + zero_nonveg_landunits = args.zero_nonveg + uniform_snowpack = args.uni_snow + no_saturation_excess = args.no_saturation_excess + + # -- Set input and 
output filenames + # -- Specify input and output directories + dir_output = args.out_dir + if not os.path.isdir(dir_output): + os.mkdir(dir_output) + + dir_output_datm = os.path.join(dir_output, "datmdata/") + if create_datm: + if not os.path.isdir(dir_output_datm): + os.mkdir(dir_output_datm) + print("dir_input_datm : ", dir_input_datm) + print("dir_output_datm : ", dir_output_datm) + + if args.create_user_mods: + if not os.path.isdir(args.user_mods_dir): + os.mkdir(args.user_mods_dir) + + cesmroot = path_to_ctsm_root() + + # -- Create user_nl_clm file + nl_clm_base = os.path.join(cesmroot, "cime_config/user_nl_clm") + nl_clm = os.path.join(args.user_mods_dir, "user_nl_clm") + with open(nl_clm_base, 'r') as basefile, open(nl_clm, 'w') as userfile: + for line in basefile: + userfile.write(line) + + # -- Create user_nl_datm_streams file + if create_datm: + nl_datm_base = os.path.join(cesmroot, "components/cdeps/datm/cime_config/user_nl_datm_streams") + nl_datm = os.path.join(args.user_mods_dir, "user_nl_datm_streams") + with open(nl_datm_base, 'r') as basefile, open(nl_datm, 'w') as userfile: + for line in basefile: + userfile.write(line) + + + if args.run_type == "point": print( "----------------------------------------------------------------------------" ) print( - "This script extracts a single point from the global CTSM inputdata datasets." + "This script extracts a single point from the global CTSM datasets." ) # -- Specify point to extract plon = args.plon plat = args.plat - - # -- Create regional CLM domain file - create_domain = args.create_domain - # -- Create CLM surface data file - create_surfdata = args.create_surfdata - # -- Create CLM surface data file - create_landuse = args.create_landuse - # -- Create single point DATM atmospheric forcing data - create_datm = args.create_datm - datm_syr = args.datm_syr - datm_eyr = args.datm_eyr - - crop_flag = args.crop_flag - site_name = args.site_name - # -- Modify landunit structure - overwrite_single_pft = args.overwrite_single_pft - dominant_pft = args.dom_pft - zero_nonveg_landunits = args.zero_nonveg - uniform_snowpack = args.uni_snow - no_saturation_excess = args.no_saturation_excess - # -- Create SinglePoint Object single_point = SinglePointCase( plat, @@ -644,130 +625,91 @@ def main(): ) single_point.create_tag() - print(single_point) - # output_to_logger (single_point) - - if crop_flag: - num_pft = "78" - else: - num_pft = "16" - - print("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) - - # -- Set input and output filenames - # -- Specify input and output directories - dir_output = args.out_dir - if not os.path.isdir(dir_output): - os.mkdir(dir_output) - - dir_inputdata = "/glade/p/cesmdata/cseg/inputdata/" - dir_clm_forcedata = "/glade/p/cgd/tss/CTSM_datm_forcing_data/" - dir_input_datm = os.path.join( - dir_clm_forcedata, "atm_forcing.datm7.GSWP3.0.5d.v1.c170516/" - ) - dir_output_datm = os.path.join(dir_output, "datmdata/") - if not os.path.isdir(dir_output_datm): - os.mkdir(dir_output_datm) - - print("dir_input_datm : ", dir_input_datm) # - print("dir_output_datm : ", dir_output_datm) # - - # -- Set time stamp - today = date.today() - timetag = today.strftime("%y%m%d") - - # -- Specify land domain file --------------------------------- - fdomain_in = os.path.join( - dir_inputdata, "share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc" - ) - fdomain_out = dir_output + single_point.add_tag_to_filename( - fdomain_in, single_point.tag - ) - single_point.fdomain_in = fdomain_in - single_point.fdomain_out = 
fdomain_out - print("fdomain_in :", fdomain_in) # - print("fdomain_out :", fdomain_out) # - - # -- Specify surface data file -------------------------------- - if crop_flag: - fsurf_in = os.path.join( - dir_inputdata, - "lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc", - ) - else: - fsurf_in = os.path.join( - dir_inputdata, - "lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc", - ) - - # fsurf_out = dir_output + single_point.add_tag_to_filename(fsurf_in, single_point.tag) # remove res from filename for singlept - fsurf_out = dir_output + single_point.create_fileout_name( - fsurf_in, single_point.tag - ) - single_point.fsurf_in = fsurf_in - single_point.fsurf_out = fsurf_out - print("fsurf_in :", fsurf_in) # - print("fsurf_out :", fsurf_out) # - - # -- Specify landuse file ------------------------------------- - if crop_flag: - fluse_in = os.path.join( - dir_inputdata, - "lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc", - ) - else: - fluse_in = os.path.join( - dir_inputdata, - "lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc", - ) - # fluse_out = dir_output + single_point.add_tag_to_filename( fluse_in, single_point.tag ) # remove resolution from filename for singlept cases - fluse_out = dir_output + single_point.create_fileout_name( - fluse_in, single_point.tag - ) - single_point.fluse_in = fluse_in - single_point.fluse_out = fluse_out - print("fluse_in :", fluse_in) # - print("fluse_out :", fluse_out) # - - # -- Specify datm domain file --------------------------------- - fdatmdomain_in = os.path.join( - dir_clm_forcedata, - "atm_forcing.datm7.GSWP3.0.5d.v1.c170516/domain.lnd.360x720_gswp3.0v1.c170606.nc", - ) - fdatmdomain_out = dir_output_datm + single_point.add_tag_to_filename( - fdatmdomain_in, single_point.tag - ) - single_point.fdatmdomain_in = fdatmdomain_in - single_point.fdatmdomain_out = fdatmdomain_out - print("fdatmdomain_in : ", fdatmdomain_in) # - print("fdatmdomain out : ", fdatmdomain_out) # - # -- Create CTSM domain file if create_domain: + # -- Specify land domain file --------------------------------- + fdomain_out = dir_output + single_point.add_tag_to_filename( + fdomain_in, single_point.tag + ) + print(fdomain_out) + single_point.fdomain_in = fdomain_in + single_point.fdomain_out = fdomain_out + print("fdomain_in :", fdomain_in) # + print("fdomain_out :", fdomain_out) # single_point.create_domain_at_point() # -- Create CTSM surface data file if create_surfdata: + # -- Specify surface file --------------------------------- + fsurf_out = dir_output + single_point.create_fileout_name( + fsurf_in, single_point.tag + ) + single_point.fsurf_in = fsurf_in + single_point.fsurf_out = fsurf_out + print("fsurf_in :", fsurf_in) + print("fsurf_out :", fsurf_out) single_point.create_surfdata_at_point() + if args.create_user_mods: + nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a") + line = 'fsurdat = ' + "'" + fsurf_out + "'" + nl_clm.write('\n' + line + '\n') + nl_clm.close() # -- Create CTSM transient landuse data file if create_landuse: + # -- Specify surface file --------------------------------- + fluse_out = dir_output + single_point.create_fileout_name( + fluse_in, single_point.tag + ) + single_point.fluse_in = fluse_in + single_point.fluse_out = fluse_out + print("fluse_in :", fluse_in) + print("fluse_out :", fluse_out) 
single_point.create_landuse_at_point()
+
+            if args.create_user_mods:
+                nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a")
+                # the transient landuse file is set with the flanduse_timeseries
+                # namelist variable, and namelist strings must be quoted
+                line = "flanduse_timeseries = '" + fluse_out + "'"
+                nl_clm.write('\n' + line + '\n')
+                nl_clm.close()

         # -- Create single point atmospheric forcing data
         if create_datm:
+            # -- Specify datm domain file ---------------------------------
+            fdatmdomain_out = dir_output_datm + single_point.add_tag_to_filename(
+                fdatmdomain_in, single_point.tag
+            )
+            single_point.fdatmdomain_in = fdatmdomain_in
+            single_point.fdatmdomain_out = fdatmdomain_out
+            print("fdatmdomain_in : ", fdatmdomain_in)
+            print("fdatmdomain out : ", fdatmdomain_out)
             single_point.create_datmdomain_at_point()
             single_point.datm_syr = datm_syr
             single_point.datm_eyr = datm_eyr
             single_point.dir_input_datm = dir_input_datm
             single_point.dir_output_datm = dir_output_datm
-            single_point.create_datm_at_point()
+            single_point.dir_solar = datm_solardir
+            single_point.dir_prec = datm_precdir
+            single_point.dir_tpqw = datm_tpqwdir
+            single_point.tag_solar = datm_solartag
+            single_point.tag_prec = datm_prectag
+            single_point.tag_tpqw = datm_tpqwtag
+            single_point.name_solar = datm_solarname
+            single_point.name_prec = datm_precname
+            single_point.name_tpqw = datm_tpqwname
+            # nl_datm is only defined when user mods are requested
+            single_point.create_datm_at_point(
+                args.create_user_mods, nl_datm if args.create_user_mods else None)
+
+        if args.create_user_mods:
+            # write_shell_commands reads self.out_dir for CLM_USRDAT_DIR
+            single_point.out_dir = dir_output
+            shell_commands_file = open(os.path.join(args.user_mods_dir,
+                                                    "shell_commands"), 'w')
+            single_point.write_shell_commands(shell_commands_file)
+
         print("Successfully ran script for single point.")
         exit()

     elif args.run_type == "reg":
         print("Running the script for the region")
+
         # -- Specify region to extract
         lat1 = args.lat1
         lat2 = args.lat2
         lon1 = args.lon1
         lon2 = args.lon2
-
-        # -- Create regional CLM domain file
-        create_domain = args.create_domain
-        # -- Create CLM surface data file
-        create_surfdata = args.create_surfdata
-        # -- Create CLM surface data file
-        create_landuse = args.create_landuse
-        # -- Create DATM atmospheric forcing data
-        create_datm = args.create_datm
-
-        crop_flag = args.crop_flag
-
         reg_name = args.reg_name

         region = RegionalCase(
             lat1,
             lat2,
             lon1,
             lon2,
             create_domain,
             create_surfdata,
             create_landuse,
             create_datm,
         )

         print(region)

-        if crop_flag:
-            num_pft = "78"
-        else:
-            num_pft = "16"
-
-        print(" crop_flag = " + crop_flag.__str__() + " num_pft =" + num_pft)
-
         region.create_tag()

-        # -- Set input and output filenames
-        # -- Specify input and output directories
-        dir_output = "/glade/scratch/" + myname + "/region/"
-        if not os.path.isdir(dir_output):
-            os.mkdir(dir_output)
-
-        dir_inputdata = "/glade/p/cesmdata/cseg/inputdata/"
-        dir_clm_forcedata = "/glade/p/cgd/tss/CTSM_datm_forcing_data/"
-
         # -- Set time stamp
         command = 'date "+%y%m%d"'
         x2 = subprocess.Popen(command, stdout=subprocess.PIPE, shell="True")
         x = x2.communicate()
         timetag = x[0].strip()
         print(timetag)

         # -- Specify land domain file ---------------------------------
-        fdomain_in = (
-            dir_inputdata + "share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc"
-        )
         fdomain_out = (
             dir_output + "domain.lnd.fv1.9x2.5_gx1v7."
+ region.tag + "_170518.nc" ) @@ -839,10 +751,6 @@ def main(): region.fdomain_out = fdomain_out # -- Specify surface data file -------------------------------- - fsurf_in = ( - dir_inputdata - + "lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc" - ) fsurf_out = ( dir_output + "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" @@ -853,10 +761,6 @@ def main(): region.fsurf_out = fsurf_out # -- Specify landuse file ------------------------------------- - fluse_in = ( - dir_inputdata - + "lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc" - ) fluse_out = ( dir_output + "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" @@ -878,7 +782,4 @@ def main(): if create_landuse: region.create_landuse_at_reg() print("Successfully ran script for a regional case.") - - else: - # print help when no option is chosen - get_parser().print_help() + exit() diff --git a/tools/site_and_regional/default_data.cfg b/tools/site_and_regional/default_data.cfg new file mode 100644 index 0000000000..f689c99044 --- /dev/null +++ b/tools/site_and_regional/default_data.cfg @@ -0,0 +1,28 @@ +[main] +clmforcingindir = /glade/p/cesmdata/inputdata + +[datm_gswp3] +dir = /glade/p/cgd/tss/CTSM_datm_forcing_data/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 +domain = domain.lnd.360x720_gswp3.0v1.c170606.nc +solardir = Solar +precdir = Precip +tpqwdir = TPHWL +solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. +prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. +tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. +solarname = CLMGSWP3v1.Solar +precname = CLMGSWP3v1.Precip +tpqwname = CLMGSWP3v1.TPQW + +[surfdat] +dir = lnd/clm2/surfdata_map/release-clm5.0.18 +surfdat_16pft = surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc +surfdat_78pft = surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc + +[landuse] +dir = lnd/clm2/surfdata_map/release-clm5.0.18 +landuse_16pft = landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc +landuse_78pft = landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc + +[domain] +file = share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc diff --git a/tools/site_and_regional/subset_data b/tools/site_and_regional/subset_data index 0c4cb9f28d..00d91ca121 100755 --- a/tools/site_and_regional/subset_data +++ b/tools/site_and_regional/subset_data @@ -6,12 +6,8 @@ The original code (subset_data.py) is located under python/ctsm folder. For full instructions on how to run the code and different options, please check python/ctsm/subset_data.py file. - This script extracts domain files, surface dataset, and DATM files at either a single point or a region using the global dataset. 
- - - To see all available options for single-point subsetting: ./subset_data point --help To see all available options for region subsetting: From 6ddf11f23dfdfedf3a111613dc4af20945b7dcec Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Wed, 8 Dec 2021 14:05:54 -0700 Subject: [PATCH 022/223] add CLM_USRDAT_DIR xml variable --- cime_config/config_component.xml | 67 +++++++++++++++----------------- 1 file changed, 31 insertions(+), 36 deletions(-) diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index f1a57c8da0..4e33b2cd48 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -14,14 +14,10 @@ - clm4.5: - clm5.0: + clm4.5: + clm5.0: clm5.1: Satellite phenology: - CN: Carbon Nitrogen model - CNDV: CN with Dynamic Vegetation (deprecated) - CN with prognostic crop: - CNDV with prognostic crop: (deprecated) Satellite phenology with VIC hydrology: Satellite phenology without anthropomorphic influences @@ -168,34 +164,21 @@ char - -bgc sp - -bgc cn - -bgc bgc - -bgc cn -crop - -bgc bgc -crop - -bgc cn -dynamic_vegetation + + -bgc sp + -bgc bgc + -bgc bgc -crop + -bgc fates -no-megan + + -bgc bgc -dynamic_vegetation - -bgc cn -dynamic_vegetation -crop -bgc bgc -dynamic_vegetation -crop -bgc sp -vichydro - -bgc fates -no-megan - -bgc sp - -bgc bgc + -bgc bgc -dynamic_vegetation - -bgc bgc -crop - -bgc cn - -bgc cn -crop - -bgc cn -dynamic_vegetation - -bgc cn -dynamic_vegetation -crop -bgc bgc -dynamic_vegetation -crop -bgc sp -vichydro - -bgc fates -no-megan - - -bgc sp - -bgc bgc - -bgc bgc -crop - -bgc fates -no-megan run_component_ctsm env_run.xml @@ -260,6 +243,18 @@ This is an advanced flag and should only be used by expert users. + + char + UNSET + run_component_ctsm + env_run.xml + Directory name for user-created surface, landuse, and datm datasets. + This is used as an argument in user_mods namelists (e.g. user_nl_clm, + user_nl_datm_streams) generated with the subset_data script. Users + should modify this variable (in shell_commands or elsewhere) to set the + location of user-created data. The default value is UNSET. + + char on,off @@ -288,19 +283,19 @@ User mods to apply to specific compset matches. - + char - + ABBY,BLAN,CPER,DEJU,GRSM,HEAL,KONA,LENO,NIWO,ONAQ,PUUM,SERC,SRER,TALL,TREE,WOOD, BARR,BONA,DCFS,DELA,GUAN,JERC,KONZ,MLBS,NOGP,ORNL,RMNP,SJER,STEI,TEAK,UKFS,WREF, BART,CLBJ,DSNY,HARV,JORN,LAJA,MOAB,OAES,OSBS,SCBI,SOAP,STER,TOOL,UNDE,YELL - - - run_component_ctsm - env_run.xml - Name of site for NEON tower data - - + + + run_component_ctsm + env_run.xml + Name of site for NEON tower data + + ========================================= CLM naming conventions From 6e2dd118d0d7b8254304de2d9fff3b7ec9747930 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 8 Dec 2021 14:36:43 -0700 Subject: [PATCH 023/223] implementing ctsm_logging instead of logging. 
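The hunk below wires subset_data.py into the shared ctsm_logging helpers rather than a hand-rolled logging setup. The calling pattern it introduces is: configure basic logging before the parser is built, let ctsm_logging attach its command-line flags to the parser, then apply the parsed flags. A minimal sketch of that sequence, assuming only the three helpers named in the import (the per-call comments are inferred from the helper names, not from ctsm_logging itself):

    import argparse
    import logging
    from ctsm.ctsm_logging import (
        setup_logging_pre_config,
        add_logging_args,
        process_logging_args,
    )

    logger = logging.getLogger(__name__)

    def main():
        setup_logging_pre_config()          # default logging config before parsing
        parser = argparse.ArgumentParser()  # stands in for the script's get_parser()
        add_logging_args(parser)            # attach the shared logging flags
        args = parser.parse_args()
        process_logging_args(args)          # apply the parsed flags
        logger.info("logging is configured")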
--- python/ctsm/subset_data.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 85b420b229..a3f7b0769e 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -97,6 +97,14 @@ from ctsm.site_and_regional.single_point_case import SinglePointCase from ctsm.site_and_regional.regional_case import RegionalCase +from ctsm.ctsm_logging import ( + setup_logging_pre_config, + add_logging_args, + process_logging_args, +) + +logger = logging.getLogger(__name__) + myname = getuser() @@ -573,6 +581,19 @@ def flush(self): def main(): + # -- add logging flags from ctsm_logging + setup_logging_pre_config() + parser = get_parser() + add_logging_args(parser) + + args = parser.parse_args() + + process_logging_args(args) + + logger.info("hello!") + logger.debug("hello!") + exit() + args = get_parser().parse_args() # --------------------------------- # From 7a176baef7ef55587f4bb31c1ac2e6526e7b7b19 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Thu, 9 Dec 2021 14:28:32 -0700 Subject: [PATCH 024/223] add clm_usrdat_dir capabilities --- .../site_and_regional/single_point_case.py | 31 +- python/ctsm/subset_data.py | 326 ++++++++---------- 2 files changed, 159 insertions(+), 198 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index bc77437499..ddbd08b2d9 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -46,6 +46,7 @@ def __init__( zero_nonveg_landunits, uniform_snowpack, no_saturation_excess, + output_dir ): super().__init__(create_domain, create_surfdata, create_landuse, create_datm) self.plat = plat @@ -56,6 +57,7 @@ def __init__( self.zero_nonveg_landunits = zero_nonveg_landunits self.uniform_snowpack = uniform_snowpack self.no_saturation_excess = no_saturation_excess + self.output_dir = output_dir def create_tag(self): if self.site_name: @@ -104,7 +106,7 @@ def create_domain_at_point(self): self.update_metadata(f3) f3.attrs["Created_from"] = self.fdomain_in - wfile = self.fdomain_out + wfile = os.path.join(self.output_dir, self.fdomain_out) f3.to_netcdf(path=wfile, mode="w") print("Successfully created file (fdomain_out)" + self.fdomain_out) f2.close() @@ -138,7 +140,7 @@ def create_landuse_at_point(self): self.update_metadata(f3) f3.attrs["Created_from"] = self.fluse_in - wfile = self.fluse_out + wfile = os.path.join(self.output_dir, self.fluse_out) # mode 'w' overwrites file f3.to_netcdf(path=wfile, mode="w") print("Successfully created file (luse_out)" + self.fluse_out, ".") @@ -149,7 +151,7 @@ def create_surfdata_at_point(self): print("----------------------------------------------------------------------") print("Creating surface dataset file at ", self.plon, self.plat, ".") # create 1d coordinate variables to enable sel() method - filename = self.fsurf_in + filename = os.path.join(self.output_dir, self.fsurf_in) f2 = self.create_1d_coord( filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat @@ -249,7 +251,7 @@ def extract_datm_at(self, file_in, file_out): def write_shell_commands(self, file): # writes out shell commands for single-point runs - file.write('! Change below line if you move the user mods') + file.write('! 
Change below line if you move the subset data directory') file.write('\n' + './xmlchange CLM_USRDAT_DIR=' + self.output_dir + '\n') file.write('\n' + "./xmlchange PTS_LON=" + str(self.plon) + '\n') file.write('\n' + "./xmlchange PTS_LAT=" + str(self.plat) + '\n') @@ -282,18 +284,20 @@ def create_datm_at_point(self, create_user_mods, datm_streams_file): dtag = ystr + "-" + mstr fsolar = os.path.join(self.dir_input_datm, self.dir_solar, self.tag_solar + dtag + ".nc") - fsolar2 = os.path.join(self.dir_output_datm, self.tag_solar + self.tag + "." + dtag + ".nc") + fsolar2 = os.path.join(self.tag_solar + self.tag + "." + dtag + ".nc") fprecip = os.path.join(self.dir_input_datm, self.dir_prec, self.tag_prec + dtag + ".nc") - fprecip2 = os.path.join(self.dir_output_datm, self.tag_prec + self.tag + "." + dtag + ".nc") + fprecip2 = os.path.join(self.tag_prec + self.tag + "." + dtag + ".nc") ftpqw = os.path.join(self.dir_input_datm, self.dir_tpqw, self.tag_tpqw + dtag + ".nc") - ftpqw2 = os.path.join(self.dir_output_datm, self.tag_tpqw + self.tag + "." + dtag + ".nc") + ftpqw2 = os.path.join(self.tag_tpqw + self.tag + "." + dtag + ".nc") + outdir = os.path.join(self.output_dir, self.dir_output_datm) infile += [fsolar, fprecip, ftpqw] - outfile += [fsolar2, fprecip2, ftpqw2] - solarfiles.append(fsolar2) - precfiles.append(fprecip2) - tpqwfiles.append(ftpqw2) + outfile += [os.path.join(outdir, fsolar2), os.path.join(outdir, fprecip2), + os.path.join(outdir, ftpqw2)] + solarfiles.append(os.path.join("$CLM_USRDAT_DIR", self.dir_output_datm, fsolar2)) + precfiles.append(os.path.join("$CLM_USRDAT_DIR", self.dir_output_datm, fprecip2)) + tpqwfiles.append(os.path.join("$CLM_USRDAT_DIR", self.dir_output_datm, ftpqw2)) nm = len(infile) for n in range(nm): @@ -302,8 +306,9 @@ def create_datm_at_point(self, create_user_mods, datm_streams_file): file_out = outfile[n] self.extract_datm_at(file_in, file_out) - print("All DATM files are created in: " + self.dir_output_datm) + print("All DATM files are created in: " + outdir) + # write to user_nl_datm_streams if specified if create_user_mods: solarfile_line = self.name_solar + ':datafiles=' + ','.join(solarfiles) precfile_line = self.name_prec + ':datafiles=' + ','.join(precfiles) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index f1db4a7f65..a10a2a7dc9 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -41,6 +41,11 @@ # Import libraries from __future__ import print_function +from ctsm.site_and_regional.regional_case import RegionalCase +from ctsm.site_and_regional.single_point_case import SinglePointCase +from ctsm.site_and_regional.base_case import BaseCase +from ctsm.path_utils import path_to_ctsm_root +from ctsm import add_cime_to_path import sys import os @@ -59,17 +64,10 @@ from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter # Get the ctsm util tools and then the cime tools. 
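# NOTE: the "from ctsm ..." imports hoisted to the top of this file resolve
# only when subset_data.py is reached through the tools/site_and_regional
# wrapper, which inserts python/ into sys.path before importing
# ctsm.subset_data; run directly, those imports would fire before the
# sys.path bootstrap below. A minimal direct-execution-safe ordering
# (a sketch, not part of this patch):
#
#     import os
#     import sys
#     _CTSM_PYTHON = os.path.abspath(
#         os.path.join(os.path.dirname(__file__), "..", "..", "python"))
#     sys.path.insert(1, _CTSM_PYTHON)
#     from ctsm import add_cime_to_path      # safe only after the insert
#     from ctsm.path_utils import path_to_ctsm_root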
-_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..","..",'python')) +_CTSM_PYTHON = os.path.abspath(os.path.join( + os.path.dirname(__file__), "..", "..", 'python')) sys.path.insert(1, _CTSM_PYTHON) -from ctsm import add_cime_to_path -from ctsm.path_utils import path_to_ctsm_root - - -from ctsm.site_and_regional.base_case import BaseCase -from ctsm.site_and_regional.single_point_case import SinglePointCase -from ctsm.site_and_regional.regional_case import RegionalCase - # -- Globals and Default Values --- DEFAULTS_FILE = "default_data.cfg" myname = getuser() @@ -116,7 +114,15 @@ def get_parser(): dest="site_name", required=False, type=str, - default="single_point", + default="", + ) + pt_parser.add_argument( + "--outdir", + help="Output directory. [default: %(default)s]", + action="store", + dest="out_dir", + type=str, + default="/glade/scratch/" + myname + "/single_point/", ) rg_parser.add_argument( "--lat1", @@ -161,7 +167,15 @@ def get_parser(): dest="reg_name", required=False, type=str, - default="region", + default="", + ) + rg_parser.add_argument( + "--outdir", + help="Output directory. [default: %(default)s]", + action="store", + dest="out_dir", + type=str, + default="/glade/scratch/" + myname + "/region/", ) # Now add arguments shared between pt_parser and rg_parser @@ -290,14 +304,6 @@ def get_parser(): type=str, default="/glade/scratch/" + myname + "/subset_data/user_mods", ) - subparser.add_argument( - "--outdir", - help="Output directory. [default: %(default)s]", - action="store", - dest="out_dir", - type=str, - default="/glade/scratch/" + myname + "/subset_data/", - ) newline = "\n" parser.epilog = f"""==================================={newline}{newline}{pt_parser.format_help()}{newline}{newline}{rg_parser.format_help()}""" @@ -380,11 +386,11 @@ def get_git_sha(): """ try: - #os.abspath(__file__) + # os.abspath(__file__) sha = ( subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) - .strip() - .decode() + .strip() + .decode() ) except subprocess.CalledProcessError: sha = "NOT-A-GIT-REPOSITORY" @@ -431,7 +437,7 @@ class StreamToLogger(object): """ def __init__( - self, stream, logger, log_level=logging.INFO, also_log_to_stream=False + self, stream, logger, log_level=logging.INFO, also_log_to_stream=False ): self.logger = logger self.stream = stream @@ -477,36 +483,13 @@ def flush(self): def main(): + # parse arguments + args = get_parser().parse_args() + + # parse defaults file defaults = configparser.ConfigParser() defaults.read(DEFAULTS_FILE) - # Parse defaults - dir_inputdata = defaults.get('main', 'clmforcingindir') - dir_input_datm = defaults.get('datm_gswp3', 'dir') - domain_file = defaults.get('domain', 'file') - fdomain_in = os.path.join(dir_inputdata, domain_file) - fdatmdomain_in = os.path.join(defaults.get('datm_gswp3', 'dir'), defaults.get('datm_gswp3', 'domain')) - fsurfdat_78pft = os.path.join(dir_inputdata, defaults.get('surfdat', 'dir'), - defaults.get('surfdat', 'surfdat_78pft')) - fsurfdat_16pft = os.path.join(dir_inputdata, defaults.get('surfdat', 'dir'), - defaults.get('surfdat', 'surfdat_16pft')) - landuse_78pft = os.path.join(dir_inputdata, defaults.get('landuse', 'dir'), - defaults.get('landuse', 'landuse_78pft')) - landuse_16pft = os.path.join(dir_inputdata, defaults.get('landuse', 'dir'), - defaults.get('landuse', 'landuse_16pft')) - - datm_solardir = defaults.get('datm_gswp3', 'solardir') - datm_precdir = defaults.get('datm_gswp3', 'precdir') - datm_tpqwdir = defaults.get('datm_gswp3', 'tpqwdir') - datm_solartag = 
defaults.get('datm_gswp3', 'solartag') - datm_prectag = defaults.get('datm_gswp3', 'prectag') - datm_tpqwtag = defaults.get('datm_gswp3', 'tpqwtag') - datm_solarname = defaults.get('datm_gswp3', 'solarname') - datm_precname = defaults.get('datm_gswp3', 'precname') - datm_tpqwname = defaults.get('datm_gswp3', 'tpqwname') - - args = get_parser().parse_args() - # --------------------------------- # today = date.today() @@ -518,7 +501,6 @@ def main(): log_level = logging.DEBUG setup_logging(log_file, log_level) - log = logging.getLogger(__name__) print("User = " + myname) print("Current directory = " + pwd) @@ -530,159 +512,145 @@ def main(): get_parser().print_help() quit() - # -- Specify which types of data to subset - create_domain = args.create_domain - create_surfdata = args.create_surfdata - create_landuse = args.create_landuse - create_datm = args.create_datm - - crop_flag = args.crop_flag - if crop_flag: + # if the crop flag is on - we need to use a different landuse and surface + # data file + if args.crop_flag: num_pft = "78" - fsurf_in = fsurfdat_78pft - fluse_in = landuse_78pft + fsurf_in = defaults.get('surfdat', 'surfdat_78pft') + fluse_in = defaults.get('landuse', 'landuse_78pft') else: num_pft = "16" - fsurf_in = fsurfdat_16pft - fluse_in = landuse_16pft - - print("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) - - # -- Start and ending years for DATM data - datm_syr = args.datm_syr - datm_eyr = args.datm_eyr - - # -- Modify landunit structure - overwrite_single_pft = args.overwrite_single_pft - dominant_pft = args.dom_pft - zero_nonveg_landunits = args.zero_nonveg - uniform_snowpack = args.uni_snow - no_saturation_excess = args.no_saturation_excess - - # -- Set input and output filenames - # -- Specify input and output directories - dir_output = args.out_dir - if not os.path.isdir(dir_output): - os.mkdir(dir_output) - - dir_output_datm = os.path.join(dir_output, "datmdata/") - if create_datm: - if not os.path.isdir(dir_output_datm): - os.mkdir(dir_output_datm) + fsurf_in = defaults.get('surfdat', 'surfdat_16pft') + fluse_in = defaults.get('landuse', 'landuse_16pft') + + print("crop_flag = " + args.crop_flag.__str__() + " => num_pft =" + num_pft) + + # -- Specify input and output directories and files + + # top-level output directory + if not os.path.isdir(args.dir_output): + os.mkdir(args.dir_output) + + # datm data + dir_output_datm = "datmdata" + dir_input_datm = defaults.get('datm_gswp3', 'dir') + if args.create_datm: + if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)): + os.mkdir(os.path.join(args.out_dir, dir_output_datm)) print("dir_input_datm : ", dir_input_datm) - print("dir_output_datm : ", dir_output_datm) + print("dir_output_datm : ", os.path.join(args.out_dir, dir_output_datm)) + # -- Set up user mods directories and base files if args.create_user_mods: if not os.path.isdir(args.user_mods_dir): os.mkdir(args.user_mods_dir) cesmroot = path_to_ctsm_root() - # -- Create user_nl_clm file - nl_clm_base = os.path.join(cesmroot, "cime_config/user_nl_clm") - nl_clm = os.path.join(args.user_mods_dir, "user_nl_clm") - with open(nl_clm_base, 'r') as basefile, open(nl_clm, 'w') as userfile: - for line in basefile: - userfile.write(line) + # -- Create empty user_nl_clm file + if args.create_surfdata or args.create_landuse: + nl_clm_base = os.path.join(cesmroot, "cime_config/user_nl_clm") + nl_clm = os.path.join(args.user_mods_dir, "user_nl_clm") + with open(nl_clm_base, 'r') as basefile, open(nl_clm, 'w') as userfile: + for line in basefile: + 
userfile.write(line) - # -- Create user_nl_datm_streams file - if create_datm: - nl_datm_base = os.path.join(cesmroot, "components/cdeps/datm/cime_config/user_nl_datm_streams") + # -- Create empty user_nl_datm_streams file + if args.create_datm: + nl_datm_base = os.path.join( + cesmroot, "components/cdeps/datm/cime_config/user_nl_datm_streams") nl_datm = os.path.join(args.user_mods_dir, "user_nl_datm_streams") with open(nl_datm_base, 'r') as basefile, open(nl_datm, 'w') as userfile: for line in basefile: userfile.write(line) - + # Default files + dir_inputdata = defaults.get('main', 'clmforcingindir') + fdomain_in = os.path.join(dir_inputdata, defaults.get('domain', 'file')) + fdatmdomain_in = os.path.join(defaults.get('datm_gswp3', 'dir'), defaults.get('datm_gswp3', 'domain')) + datm_solardir = defaults.get('datm_gswp3', 'solardir') + datm_precdir = defaults.get('datm_gswp3', 'precdir') + datm_tpqwdir = defaults.get('datm_gswp3', 'tpqwdir') + datm_solartag = defaults.get('datm_gswp3', 'solartag') + datm_prectag = defaults.get('datm_gswp3', 'prectag') + datm_tpqwtag = defaults.get('datm_gswp3', 'tpqwtag') + datm_solarname = defaults.get('datm_gswp3', 'solarname') + datm_precname = defaults.get('datm_gswp3', 'precname') + datm_tpqwname = defaults.get('datm_gswp3', 'tpqwname') if args.run_type == "point": - print( - "----------------------------------------------------------------------------" - ) - print( - "This script extracts a single point from the global CTSM datasets." - ) - - # -- Specify point to extract - plon = args.plon - plat = args.plat - site_name = args.site_name + print("----------------------------------------------------------------------------") + print( "This script extracts a single point from the global CTSM datasets.") # -- Create SinglePoint Object single_point = SinglePointCase( - plat, - plon, - site_name, - create_domain, - create_surfdata, - create_landuse, - create_datm, - overwrite_single_pft, - dominant_pft, - zero_nonveg_landunits, - uniform_snowpack, - no_saturation_excess, + args.plat, + args.plon, + args.site_name, + args.create_domain, + args.create_surfdata, + args.create_landuse, + args.create_datm, + args.overwrite_single_pft, + args.dominant_pft, + args.zero_nonveg_landunits, + args.uniform_snowpack, + args.no_saturation_excess, + args.dir_output ) single_point.create_tag() # -- Create CTSM domain file - if create_domain: + if single_point.create_domain: # -- Specify land domain file --------------------------------- - fdomain_out = dir_output + single_point.add_tag_to_filename( - fdomain_in, single_point.tag - ) - print(fdomain_out) single_point.fdomain_in = fdomain_in - single_point.fdomain_out = fdomain_out - print("fdomain_in :", fdomain_in) # - print("fdomain_out :", fdomain_out) # + single_point.fdomain_out = single_point.add_tag_to_filename(fdomain_in, single_point.tag) + print("fdomain_in :", fdomain_in) + print("fdomain_out :", single_point.fdomain_out) single_point.create_domain_at_point() # -- Create CTSM surface data file - if create_surfdata: + if single_point.create_surfdata: # -- Specify surface file --------------------------------- - fsurf_out = dir_output + single_point.create_fileout_name( - fsurf_in, single_point.tag - ) single_point.fsurf_in = fsurf_in - single_point.fsurf_out = fsurf_out + single_point.fsurf_out = single_point.create_fileout_name(fsurf_in, single_point.tag) print("fsurf_in :", fsurf_in) - print("fsurf_out :", fsurf_out) + print("fsurf_out :", single_point.fsurf_out) single_point.create_surfdata_at_point() + + # 
write to user_nl_clm if specified if args.create_user_mods: nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a") - line = 'fsurdat = ' + "'" + fsurf_out + "'" + line = 'fsurdat = ' + "'$CLM_USRDAT_DIR/" + single_point.fsurf_out + "'" nl_clm.write('\n' + line + '\n') nl_clm.close() # -- Create CTSM transient landuse data file - if create_landuse: + if single_point.create_landuse: # -- Specify surface file --------------------------------- - fluse_out = dir_output + single_point.create_fileout_name( - fluse_in, single_point.tag - ) single_point.fluse_in = fluse_in - single_point.fluse_out = fluse_out + single_point.fluse_out = single_point.create_fileout_name(fluse_in, single_point.tag) print("fluse_in :", fluse_in) - print("fluse_out :", fluse_out) + print("fluse_out :", single_point.fluse_out) single_point.create_landuse_at_point() + + # write to user_nl_clm data if specified if args.create_user_mods: nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a") - line = 'landuse = ' + fluse_out + line = 'fsurdat = ' + "'$CLM_USRDAT_DIR/" + single_point.fluse_out + "'" nl_clm.write('\n' + line + '\n') nl_clm.close() # -- Create single point atmospheric forcing data - if create_datm: - # -- Specify datm domain file --------------------------------- - fdatmdomain_out = dir_output_datm + single_point.add_tag_to_filename( - fdatmdomain_in, single_point.tag - ) + if single_point.create_datm: + # -- Specify datm and subset domain file --------------------------------- single_point.fdatmdomain_in = fdatmdomain_in - single_point.fdatmdomain_out = fdatmdomain_out + single_point.fdatmdomain_out = os.path.join(dir_output_datm, single_point.add_tag_to_filename(fdatmdomain_in, single_point.tag)) print("fdatmdomain_in : ", fdatmdomain_in) print("fdatmdomain out : ", fdatmdomain_out) single_point.create_datmdomain_at_point() + + # -- Specify DATM directories, tags, and stream names single_point.datm_syr = datm_syr single_point.datm_eyr = datm_eyr single_point.dir_input_datm = dir_input_datm @@ -698,41 +666,30 @@ def main(): single_point.name_tpqw = datm_tpqwname single_point.create_datm_at_point(args.create_user_mods, nl_datm) + # -- Write shell commands if args.create_user_mods: - shell_commands_file = open(os.path.join(args.user_mods_dir, - "shell_commands"), 'w') + shell_commands_file = open(os.path.join(args.user_mods_dir,"shell_commands"), 'w') single_point.write_shell_commands(shell_commands_file) - print("Successfully ran script for single point.") exit() elif args.run_type == "reg": - print("Running the script for the region") - - # -- Specify region to extract - lat1 = args.lat1 - lat2 = args.lat2 - - lon1 = args.lon1 - lon2 = args.lon2 - - reg_name = args.reg_name + print("----------------------------------------------------------------------------") + print( "This script extracts a region from the global CTSM datasets.") + # -- Create Region Object region = RegionalCase( - lat1, - lat2, - lon1, - lon2, - reg_name, - create_domain, - create_surfdata, - create_landuse, - create_datm, + args.lat1, + args.lat2, + args.lon1, + args.lon2, + args.reg_name, + args.create_domain, + args.create_surfdata, + args.create_landuse, + args.create_datm, ) - - print(region) - region.create_tag() # -- Set time stamp @@ -743,29 +700,28 @@ def main(): print(timetag) # -- Specify land domain file --------------------------------- - fdomain_out = ( - dir_output + "domain.lnd.fv1.9x2.5_gx1v7." + region.tag + "_170518.nc" - ) + fdomain_out = (dir_output + "domain.lnd.fv1.9x2.5_gx1v7." 
+ region.tag + "_170518.nc") + # SinglePointCase.set_fdomain (fdomain) region.fdomain_in = fdomain_in region.fdomain_out = fdomain_out # -- Specify surface data file -------------------------------- fsurf_out = ( - dir_output - + "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" - + region.tag - + "_c170824.nc" + dir_output + + "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" + + region.tag + + "_c170824.nc" ) region.fsurf_in = fsurf_in region.fsurf_out = fsurf_out # -- Specify landuse file ------------------------------------- fluse_out = ( - dir_output - + "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" - + region.tag - + ".c170824.nc" + dir_output + + "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" + + region.tag + + ".c170824.nc" ) region.fluse_in = fluse_in region.fluse_out = fluse_out From d2b8afb4abea113dae30d9f08a506d9ad31d8ffe Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Thu, 9 Dec 2021 14:33:16 -0700 Subject: [PATCH 025/223] fix typo with args.outputdir --- python/ctsm/subset_data.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index a10a2a7dc9..e1c5aeed27 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -528,17 +528,17 @@ def main(): # -- Specify input and output directories and files # top-level output directory - if not os.path.isdir(args.dir_output): - os.mkdir(args.dir_output) + if not os.path.isdir(args.outdir): + os.mkdir(args.outdir) # datm data dir_output_datm = "datmdata" dir_input_datm = defaults.get('datm_gswp3', 'dir') if args.create_datm: - if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)): - os.mkdir(os.path.join(args.out_dir, dir_output_datm)) + if not os.path.isdir(os.path.join(args.outdir, dir_output_datm)): + os.mkdir(os.path.join(args.outdir, dir_output_datm)) print("dir_input_datm : ", dir_input_datm) - print("dir_output_datm : ", os.path.join(args.out_dir, dir_output_datm)) + print("dir_output_datm : ", os.path.join(args.outdir, dir_output_datm)) # -- Set up user mods directories and base files if args.create_user_mods: From 6e39fc5c8ce7f18ad2331649ee80760ebc23901a Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 9 Dec 2021 16:35:53 -0700 Subject: [PATCH 026/223] use ctsm logging... 
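A detail worth noting in the hunks below: each module gains a named logger (logger = logging.getLogger(__name__)), but the call sites use the module-level logging.info(...)/logging.debug(...) functions, which route through the root logger and ignore the per-module name. A short sketch of the named-logger idiom these modules appear to be reaching for (an illustration, not the committed code):

    import logging

    logger = logging.getLogger(__name__)  # named after the defining module

    def report(tag):
        # logger.* honors per-module levels and handlers; %-style arguments
        # defer string formatting until the record is actually emitted
        logger.info("Creating domain file at region: %s", tag)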
--- python/ctsm/site_and_regional/base_case.py | 8 ++- .../ctsm/site_and_regional/regional_case.py | 20 ++++--- .../site_and_regional/single_point_case.py | 36 ++++++------ python/ctsm/subset_data.py | 58 +++++++++---------- 4 files changed, 64 insertions(+), 58 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 1a17762281..5b8912f648 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -1,4 +1,5 @@ import os +import logging import numpy as np import xarray as xr import subprocess @@ -8,6 +9,7 @@ myname = getuser() +logger = logging.getLogger(__name__) class BaseCase: """ @@ -62,7 +64,7 @@ def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): x_dim: dimension name in X -- lon y_dim: dimension name in Y -- lat """ - print("Open file: " + filename) + logging.debug("Open file: " + filename) f1 = xr.open_dataset(filename) # create 1d coordinate variables to enable sel() method @@ -90,7 +92,7 @@ def add_tag_to_filename(filename, tag): if basename[cend] == "c": cend = cend - 1 if (basename[cend] != ".") and (basename[cend] != "_"): - print("Trouble figuring out where to add tag to filename:" + filename) + logging.error("Trouble figuring out where to add tag to filename:" + filename) os.abort() today = date.today() today_string = today.strftime("%y%m%d") @@ -123,7 +125,7 @@ def update_metadata(self, nc): for attr in del_attrs: if attr in attr_list: - # print ("This attr should be deleted:", attr) + logging.debug ("This attr should be deleted : "+ attr) del nc.attrs[attr] # for attr, value in attr_list.items(): diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 4b0d4fbb3c..18893aa5d5 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -2,7 +2,9 @@ import numpy as np import xarray as xr +import logging +logger = logging.getLogger(__name__) class RegionalCase(BaseCase): """ @@ -43,8 +45,8 @@ def create_tag(self): ) def create_domain_at_reg(self): - # print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print("Creating domain file at region:", self.tag) + #logging.debug ("Creating domain file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + logging.info("Creating domain file at region:"+ self.tag) # create 1d coordinate variables to enable sel() method f2 = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") lat = f2["lat"] @@ -61,13 +63,13 @@ def create_domain_at_reg(self): wfile = self.fdomain_out # mode 'w' overwrites file f3.to_netcdf(path=wfile, mode="w") - print("Successfully created file (fdomain_out)" + self.fdomain_out) + logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) f2.close() f3.close() def create_surfdata_at_reg(self): - # print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print("Creating surface dataset file at region:", self.tag) + #logging.debug ("Creating surface dataset file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + logging.info("Creating surface dataset file at region:"+ self.tag) # create 1d coordinate variables to enable sel() method filename = self.fsurf_in f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") @@ -84,14 +86,14 @@ def 
create_surfdata_at_reg(self): # mode 'w' overwrites file f3.to_netcdf(path=self.fsurf_out, mode="w") - print("created file (fsurf_out)" + self.fsurf_out) + logging.info("created file (fsurf_out)" + self.fsurf_out) # f1.close(); f2.close() f3.close() def create_landuse_at_reg(self): - # print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2) - print("Creating surface dataset file at region:", self.tag) + #logging.debug ("Creating landuse file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + logging.info("Creating landuse file at region:"+ self.tag) # create 1d coordinate variables to enable sel() method f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") lat = f2["lat"] @@ -108,6 +110,6 @@ def create_landuse_at_reg(self): wfile = self.fluse_out # mode 'w' overwrites file f3.to_netcdf(path=wfile, mode="w") - print("Successfully created file (fdomain_out)" + self.fdomain_out) + logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) f2.close() f3.close() diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 2f4949572a..92a50c1b0b 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -1,9 +1,11 @@ from ctsm.site_and_regional.base_case import BaseCase import os +import logging import numpy as np import xarray as xr from datetime import date +logger = logging.getLogger(__name__) class SinglePointCase(BaseCase): """ @@ -95,8 +97,8 @@ def create_fileout_name(filename, tag): return new_string def create_domain_at_point(self): - print("----------------------------------------------------------------------") - print("Creating domain file at ", self.plon, self.plat) + logging.info("----------------------------------------------------------------------") + logging.info("Creating domain file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") # create 1d coordinate variables to enable sel() method f2 = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") # extract gridcell closest to plon/plat @@ -110,13 +112,13 @@ def create_domain_at_point(self): wfile = self.fdomain_out f3.to_netcdf(path=wfile, mode="w") - print("Successfully created file (fdomain_out)" + self.fdomain_out) + logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) f2.close() f3.close() def create_landuse_at_point(self): - print("----------------------------------------------------------------------") - print("Creating landuse file at ", self.plon, self.plat, ".") + logging.info("----------------------------------------------------------------------") + logging.info("Creating landuse file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") # create 1d coordinate variables to enable sel() method f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat @@ -143,13 +145,13 @@ def create_landuse_at_point(self): wfile = self.fluse_out # mode 'w' overwrites file f3.to_netcdf(path=wfile, mode="w") - print("Successfully created file (luse_out)" + self.fluse_out, ".") + logging.info("Successfully created file (luse_out)" + self.fluse_out+ ".") f2.close() f3.close() def create_surfdata_at_point(self): - print("----------------------------------------------------------------------") - print("Creating surface dataset file at ", self.plon, self.plat, ".") + 
logging.info("----------------------------------------------------------------------") + logging.info("Creating surface dataset file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") # create 1d coordinate variables to enable sel() method filename = self.fsurf_in f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") @@ -200,13 +202,13 @@ def create_surfdata_at_point(self): del f3.attrs["History_Log"] # mode 'w' overwrites file f3.to_netcdf(path=self.fsurf_out, mode="w") - print("Successfully created file (fsurf_out) :" + self.fsurf_out) + logging.info("Successfully created file (fsurf_out) :" + self.fsurf_out) f2.close() f3.close() def create_datmdomain_at_point(self): - print("----------------------------------------------------------------------") - print("Creating DATM domain file at ", self.plon, self.plat, ".") + logging.info("----------------------------------------------------------------------") + logging.info("Creating DATM domain file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") # create 1d coordinate variables to enable sel() method filename = self.fdatmdomain_in f2 = self.create_1d_coord(filename, "xc", "yc", "ni", "nj") @@ -220,7 +222,7 @@ def create_datmdomain_at_point(self): f3.attrs["Created_from"] = self.fdatmdomain_in # mode 'w' overwrites file f3.to_netcdf(path=wfile, mode="w") - print("Successfully created file (fdatmdomain_out) :" + self.fdatmdomain_out) + logging.info("Successfully created file (fdatmdomain_out) :" + self.fdatmdomain_out) f2.close() f3.close() @@ -239,13 +241,13 @@ def extract_datm_at(self, file_in, file_out): f3.attrs["Created_from"] = file_in # mode 'w' overwrites file f3.to_netcdf(path=file_out, mode="w") - print("Successfully created file :" + file_out) + logging.info("Successfully created file :" + file_out) f2.close() f3.close() def create_datm_at_point(self): - print("----------------------------------------------------------------------") - print("Creating DATM files at ", self.plon, self.plat, ".") + logging.info("----------------------------------------------------------------------") + logging.info("Creating DATM files at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") # -- specify subdirectory names and filename prefixes solrdir = "Solar/" precdir = "Precip/" @@ -280,9 +282,9 @@ def create_datm_at_point(self): nm = len(infile) for n in range(nm): - print(outfile[n]) + logging.debug(outfile[n]) file_in = infile[n] file_out = outfile[n] self.extract_datm_at(file_in, file_out) - print("All DATM files are created in: " + self.dir_output_datm) + logging.info("All DATM files are created in: "+ self.dir_output_datm+".") diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index a3f7b0769e..353bca05aa 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -592,9 +592,6 @@ def main(): logger.info("hello!") logger.debug("hello!") - exit() - - args = get_parser().parse_args() # --------------------------------- # @@ -603,22 +600,22 @@ def main(): pwd = os.getcwd() - log_file = os.path.join(pwd, today_string + ".log") + #log_file = os.path.join(pwd, today_string + ".log") - log_level = logging.DEBUG - setup_logging(log_file, log_level) - log = logging.getLogger(__name__) + #log_level = logging.DEBUG + #setup_logging(log_file, log_level) + #log = logging.getLogger(__name__) - print("User = " + myname) - print("Current directory = " + pwd) + logging.info("User = " + myname) + logging.info("Current directory = " + pwd) # --------------------------------- # if args.run_type 
== "point": - print( + logging.info( "----------------------------------------------------------------------------" ) - print( + logging.info( "This script extracts a single point from the global CTSM inputdata datasets." ) @@ -665,7 +662,7 @@ def main(): ) single_point.create_tag() - print(single_point) + logging.debug(single_point) # output_to_logger (single_point) if crop_flag: @@ -673,7 +670,7 @@ def main(): else: num_pft = "16" - print("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) + logging.debug("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) # -- Set input and output filenames # -- Specify input and output directories @@ -686,12 +683,14 @@ def main(): dir_input_datm = os.path.join( dir_clm_forcedata, "atm_forcing.datm7.GSWP3.0.5d.v1.c170516/" ) + dir_output_datm = os.path.join(dir_output, "datmdata/") + # -- create output dir if it does not exist if not os.path.isdir(dir_output_datm): os.mkdir(dir_output_datm) - print("dir_input_datm : ", dir_input_datm) # - print("dir_output_datm : ", dir_output_datm) # + logging.info("dir_input_datm : "+ dir_input_datm) # + logging.info("dir_output_datm : "+ dir_output_datm) # # -- Set time stamp today = date.today() @@ -706,8 +705,8 @@ def main(): ) single_point.fdomain_in = fdomain_in single_point.fdomain_out = fdomain_out - print("fdomain_in :", fdomain_in) # - print("fdomain_out :", fdomain_out) # + logging.info("fdomain_in : "+ fdomain_in) # + logging.info("fdomain_out : "+ fdomain_out) # # -- Specify surface data file -------------------------------- if crop_flag: @@ -727,8 +726,9 @@ def main(): ) single_point.fsurf_in = fsurf_in single_point.fsurf_out = fsurf_out - print("fsurf_in :", fsurf_in) # - print("fsurf_out :", fsurf_out) # + + logging.info("fsurf_in : "+ fsurf_in) # + logging.info("fsurf_out : "+ fsurf_out) # # -- Specify landuse file ------------------------------------- if crop_flag: @@ -747,8 +747,8 @@ def main(): ) single_point.fluse_in = fluse_in single_point.fluse_out = fluse_out - print("fluse_in :", fluse_in) # - print("fluse_out :", fluse_out) # + logging.info("fluse_in : "+ fluse_in) # + logging.info("fluse_out : "+ fluse_out) # # -- Specify datm domain file --------------------------------- fdatmdomain_in = os.path.join( @@ -760,8 +760,8 @@ def main(): ) single_point.fdatmdomain_in = fdatmdomain_in single_point.fdatmdomain_out = fdatmdomain_out - print("fdatmdomain_in : ", fdatmdomain_in) # - print("fdatmdomain out : ", fdatmdomain_out) # + logging.info("fdatmdomain_in : "+ fdatmdomain_in) # + logging.info("fdatmdomain out : "+ fdatmdomain_out) # # -- Create CTSM domain file if create_domain: @@ -784,11 +784,11 @@ def main(): single_point.dir_output_datm = dir_output_datm single_point.create_datm_at_point() - print("Successfully ran script for single point.") + logging.info("Successfully ran script for single point.") exit() elif args.run_type == "reg": - print("Running the script for the region") + logging.info("Running the script for the region") # -- Specify region to extract lat1 = args.lat1 lat2 = args.lat2 @@ -821,14 +821,14 @@ def main(): create_datm, ) - print(region) + logging.debug(region) if crop_flag: num_pft = "78" else: num_pft = "16" - print(" crop_flag = " + crop_flag.__str__() + " num_pft =" + num_pft) + logging.debug("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) region.create_tag() @@ -846,7 +846,7 @@ def main(): x2 = subprocess.Popen(command, stdout=subprocess.PIPE, shell="True") x = x2.communicate() timetag = x[0].strip() - print(timetag) + 
logging.info(timetag) # -- Specify land domain file --------------------------------- fdomain_in = ( @@ -898,7 +898,7 @@ def main(): # -- Create CTSM transient landuse data file if create_landuse: region.create_landuse_at_reg() - print("Successfully ran script for a regional case.") + logging.info("Successfully ran script for a regional case.") else: # print help when no option is chosen From fad54a033bdc03bfd0bb404203ad19ed83be1254 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 9 Dec 2021 17:22:35 -0700 Subject: [PATCH 027/223] remove the previous logging to file, update flags. --- .../site_and_regional/single_point_case.py | 6 +- python/ctsm/subset_data.py | 201 ++++++------------ tools/site_and_regional/subset_data | 2 +- 3 files changed, 71 insertions(+), 138 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 92a50c1b0b..17e103a120 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -51,7 +51,7 @@ def __init__( dominant_pft, zero_nonveg_landunits, uniform_snowpack, - no_saturation_excess, + saturation_excess, ): super().__init__(create_domain, create_surfdata, create_landuse, create_datm) self.plat = plat @@ -61,7 +61,7 @@ def __init__( self.dominant_pft = dominant_pft self.zero_nonveg_landunits = zero_nonveg_landunits self.uniform_snowpack = uniform_snowpack - self.no_saturation_excess = no_saturation_excess + self.saturation_excess = saturation_excess def create_tag(self): if self.site_name: @@ -176,7 +176,7 @@ def create_surfdata_at_point(self): f3["PCT_GLACIER"][:, :] = 0.0 if self.uniform_snowpack: f3["STD_ELEV"][:, :] = 20.0 - if self.no_saturation_excess: + if not self.saturation_excess: f3["FMAX"][:, :] = 0.0 # specify dimension order diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 353bca05aa..50050616d9 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -101,16 +101,15 @@ setup_logging_pre_config, add_logging_args, process_logging_args, -) - +) + logger = logging.getLogger(__name__) myname = getuser() - def get_parser(): """ - Get parser object for this script. + Get the parser object for subset_data.py script. """ parser = ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter @@ -123,6 +122,7 @@ def get_parser(): pt_parser = subparsers.add_parser("point", help="Run script for a single point.") rg_parser = subparsers.add_parser("reg", help="Run script for a region.") + # -- signle point parser options pt_parser.add_argument( "--lat", help="Single point latitude. [default: %(default)s]", @@ -151,7 +151,7 @@ def get_parser(): default="", ) pt_parser.add_argument( - "--create_domain", + "--create-domain", help="Flag for creating CLM domain file at single point. [default: %(default)s]", action="store", dest="create_domain", @@ -162,7 +162,7 @@ def get_parser(): default=False, ) pt_parser.add_argument( - "--create_surface", + "--create-surface", help="Flag for creating surface data file at single point. [default: %(default)s]", action="store", dest="create_surfdata", @@ -173,7 +173,7 @@ def get_parser(): default=True, ) pt_parser.add_argument( - "--create_landuse", + "--create-landuse", help="Flag for creating landuse data file at single point. 
[default: %(default)s]", action="store", dest="create_landuse", @@ -184,7 +184,7 @@ def get_parser(): default=False, ) pt_parser.add_argument( - "--create_datm", + "--create-datm", help="Flag for creating DATM forcing data at single point. [default: %(default)s]", action="store", dest="create_datm", @@ -195,7 +195,7 @@ def get_parser(): default=False, ) pt_parser.add_argument( - "--datm_syr", + "--datm-syr", help="Start year for creating DATM forcing at single point. [default: %(default)s]", action="store", dest="datm_syr", @@ -204,7 +204,7 @@ def get_parser(): default=1901, ) pt_parser.add_argument( - "--datm_eyr", + "--datm-eyr", help="End year for creating DATM forcing at single point. [default: %(default)s]", action="store", dest="datm_eyr", @@ -214,9 +214,13 @@ def get_parser(): ) pt_parser.add_argument( "--crop", - help="Create datasets using the extensive list of prognostic crop types. [default: %(default)s]", - action="store_true", + help="Flag for creating datasets using the extensive list of prognostic crop types. [default: %(default)s]", + action="store", dest="crop_flag", + type=str2bool, + nargs="?", + const=True, + required=False, default=False, ) pt_parser.add_argument( @@ -228,33 +232,47 @@ def get_parser(): default=7, ) pt_parser.add_argument( - "--no-unisnow", - help="Turn off the flag for create uniform snowpack. [default: %(default)s]", - action="store_false", + "--unisnow", + help="Flag for creating datasets using uniform snowpack. [default: %(default)s]", + action="store", dest="uni_snow", + type=str2bool, + nargs="?", + const=True, + required=False, default=True, ) pt_parser.add_argument( - "--no-overwrite_single_pft", - help="Turn off the flag for making the whole grid 100%% single PFT. [default: %(default)s]", - action="store_false", + "--single-pft", + help="Flag for making the whole grid 100%% single PFT. [default: %(default)s]", + action="store", dest="overwrite_single_pft", + type=str2bool, + nargs="?", + const=True, + required=False, default=True, ) pt_parser.add_argument( - "--zero_nonveg", - help="Set all non-vegetation landunits to zero. [default: %(default)s]", + "--zero-nonveg", + help="Flag for setting all non-vegetation landunits to zero. [default: %(default)s]", action="store", dest="zero_nonveg", - type=bool, + type=str2bool, + nargs="?", + const=True, + required=False, default=True, ) pt_parser.add_argument( - "--no_saturation_excess", - help="Turn off the flag for saturation excess. [default: %(default)s]", + "--saturation-excess", + help="Flag for making dataset using saturation excess. [default: %(default)s]", action="store", - dest="no_saturation_excess", - type=bool, + dest="saturation_excess", + type=str2bool, + nargs="?", + const=True, + required=False, default=True, ) pt_parser.add_argument( @@ -266,6 +284,7 @@ def get_parser(): default="/glade/scratch/" + myname + "/single_point/", ) + # -- region-specific parser options rg_parser.add_argument( "--lat1", help="Region start latitude. [default: %(default)s]", @@ -312,7 +331,7 @@ def get_parser(): default="", ) rg_parser.add_argument( - "--create_domain", + "--create-domain", help="Flag for creating CLM domain file for a region. [default: %(default)s]", action="store", dest="create_domain", @@ -323,7 +342,7 @@ def get_parser(): default=False, ) rg_parser.add_argument( - "--create_surface", + "--create-surface", help="Flag for creating surface data file for a region. 
[default: %(default)s]", action="store", dest="create_surfdata", @@ -334,7 +353,7 @@ def get_parser(): default=True, ) rg_parser.add_argument( - "--create_landuse", + "--create-landuse", help="Flag for creating landuse data file for a region. [default: %(default)s]", action="store", dest="create_landuse", @@ -345,7 +364,7 @@ def get_parser(): default=False, ) rg_parser.add_argument( - "--create_datm", + "--create-datm", help="Flag for creating DATM forcing data for a region. [default: %(default)s]", action="store", dest="create_datm", @@ -356,7 +375,7 @@ def get_parser(): default=False, ) rg_parser.add_argument( - "--datm_syr", + "--datm-syr", help="Start year for creating DATM forcing for a region. [default: %(default)s]", action="store", dest="datm_syr", @@ -365,7 +384,7 @@ def get_parser(): default=1901, ) rg_parser.add_argument( - "--datm_eyr", + "--datm-eyr", help="End year for creating DATM forcing for a region. [default: %(default)s]", action="store", dest="datm_eyr", @@ -485,7 +504,7 @@ def get_git_sha(): """ try: - #os.abspath(__file__) + # os.abspath(__file__) sha = ( subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) .strip() @@ -496,89 +515,6 @@ def get_git_sha(): return sha -def setup_logging(log_file, log_level): - """ - Setup logging to log to console and log file. - """ - - root_logger = logging.getLogger() - root_logger.setLevel(log_level) - - # setup log file - one_mb = 1000000 - handler = logging.handlers.RotatingFileHandler( - log_file, maxBytes=one_mb, backupCount=10 - ) - - fmt = logging.Formatter( - "%(asctime)s %(name)-12s %(levelname)-8s %(message)s", - datefmt="%y-%m-%d %H:%M:%S", - ) - - handler.setFormatter(fmt) - root_logger.addHandler(handler) - - # setup logging to console - stream_handler = logging.StreamHandler(sys.stdout) - stream_handler.setFormatter(fmt) - root_logger.addHandler(stream_handler) - - # redirect stdout/err to log file - StreamToLogger.setup_stdout() - StreamToLogger.setup_stderr() - - -class StreamToLogger(object): - """ - Custom class to log all stdout and stderr streams. 
- modified from: - https://www.electricmonk.nl/log/2011/08/14/redirect-stdout-and-stderr-to-a-logger-in-python/ - """ - - def __init__( - self, stream, logger, log_level=logging.INFO, also_log_to_stream=False - ): - self.logger = logger - self.stream = stream - self.log_level = log_level - self.linebuf = "" - self.also_log_to_stream = also_log_to_stream - - @classmethod - def setup_stdout(cls, also_log_to_stream=True): - """ - Setup logger for stdout - """ - stdout_logger = logging.getLogger("STDOUT") - sl = StreamToLogger(sys.stdout, stdout_logger, logging.INFO, also_log_to_stream) - sys.stdout = sl - - @classmethod - def setup_stderr(cls, also_log_to_stream=True): - """ - Setup logger for stdout - """ - stderr_logger = logging.getLogger("STDERR") - sl = StreamToLogger( - sys.stderr, stderr_logger, logging.ERROR, also_log_to_stream - ) - sys.stderr = sl - - def write(self, buf): - temp_linebuf = self.linebuf + buf - self.linebuf = "" - for line in temp_linebuf.splitlines(True): - if line[-1] == "\n": - self.logger.log(self.log_level, line.rstrip()) - else: - self.linebuf += line - - def flush(self): - if self.linebuf != "": - self.logger.log(self.log_level, self.linebuf.rstrip()) - self.linebuf = "" - - def main(): # -- add logging flags from ctsm_logging @@ -590,9 +526,6 @@ def main(): process_logging_args(args) - logger.info("hello!") - logger.debug("hello!") - # --------------------------------- # today = date.today() @@ -600,11 +533,11 @@ def main(): pwd = os.getcwd() - #log_file = os.path.join(pwd, today_string + ".log") + # log_file = os.path.join(pwd, today_string + ".log") - #log_level = logging.DEBUG - #setup_logging(log_file, log_level) - #log = logging.getLogger(__name__) + # log_level = logging.DEBUG + # setup_logging(log_file, log_level) + # log = logging.getLogger(__name__) logging.info("User = " + myname) logging.info("Current directory = " + pwd) @@ -643,7 +576,7 @@ def main(): dominant_pft = args.dom_pft zero_nonveg_landunits = args.zero_nonveg uniform_snowpack = args.uni_snow - no_saturation_excess = args.no_saturation_excess + saturation_excess = args.saturation_excess # -- Create SinglePoint Object single_point = SinglePointCase( @@ -658,7 +591,7 @@ def main(): dominant_pft, zero_nonveg_landunits, uniform_snowpack, - no_saturation_excess, + saturation_excess, ) single_point.create_tag() @@ -689,8 +622,8 @@ def main(): if not os.path.isdir(dir_output_datm): os.mkdir(dir_output_datm) - logging.info("dir_input_datm : "+ dir_input_datm) # - logging.info("dir_output_datm : "+ dir_output_datm) # + logging.info("dir_input_datm : " + dir_input_datm) # + logging.info("dir_output_datm : " + dir_output_datm) # # -- Set time stamp today = date.today() @@ -705,8 +638,8 @@ def main(): ) single_point.fdomain_in = fdomain_in single_point.fdomain_out = fdomain_out - logging.info("fdomain_in : "+ fdomain_in) # - logging.info("fdomain_out : "+ fdomain_out) # + logging.info("fdomain_in : " + fdomain_in) # + logging.info("fdomain_out : " + fdomain_out) # # -- Specify surface data file -------------------------------- if crop_flag: @@ -727,8 +660,8 @@ def main(): single_point.fsurf_in = fsurf_in single_point.fsurf_out = fsurf_out - logging.info("fsurf_in : "+ fsurf_in) # - logging.info("fsurf_out : "+ fsurf_out) # + logging.info("fsurf_in : " + fsurf_in) # + logging.info("fsurf_out : " + fsurf_out) # # -- Specify landuse file ------------------------------------- if crop_flag: @@ -747,8 +680,8 @@ def main(): ) single_point.fluse_in = fluse_in single_point.fluse_out = fluse_out - 
logging.info("fluse_in : "+ fluse_in) # - logging.info("fluse_out : "+ fluse_out) # + logging.info("fluse_in : " + fluse_in) # + logging.info("fluse_out : " + fluse_out) # # -- Specify datm domain file --------------------------------- fdatmdomain_in = os.path.join( @@ -760,8 +693,8 @@ def main(): ) single_point.fdatmdomain_in = fdatmdomain_in single_point.fdatmdomain_out = fdatmdomain_out - logging.info("fdatmdomain_in : "+ fdatmdomain_in) # - logging.info("fdatmdomain out : "+ fdatmdomain_out) # + logging.info("fdatmdomain_in : " + fdatmdomain_in) # + logging.info("fdatmdomain out : " + fdatmdomain_out) # # -- Create CTSM domain file if create_domain: diff --git a/tools/site_and_regional/subset_data b/tools/site_and_regional/subset_data index 0c4cb9f28d..dcfc957d5a 100755 --- a/tools/site_and_regional/subset_data +++ b/tools/site_and_regional/subset_data @@ -4,6 +4,7 @@ This is a just top-level skeleton script that calls subset_data.py. The original code (subset_data.py) is located under python/ctsm folder. + For full instructions on how to run the code and different options, please check python/ctsm/subset_data.py file. @@ -25,7 +26,6 @@ import sys _CTSM_PYTHON = os.path.join( os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python" ) -# print (_CTSM_PYTHON) sys.path.insert(1, _CTSM_PYTHON) from ctsm.subset_data import main From 77d23da728e78786f3e55bdad40bab9490f06f7b Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Thu, 9 Dec 2021 17:39:08 -0700 Subject: [PATCH 028/223] fix writing out of user_mods files --- python/ctsm/subset_data.py | 70 ++++++++++++++++++++------------------ 1 file changed, 37 insertions(+), 33 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index e1c5aeed27..91cc3940d2 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -528,17 +528,17 @@ def main(): # -- Specify input and output directories and files # top-level output directory - if not os.path.isdir(args.outdir): - os.mkdir(args.outdir) + if not os.path.isdir(args.out_dir): + os.mkdir(args.out_dir) # datm data dir_output_datm = "datmdata" dir_input_datm = defaults.get('datm_gswp3', 'dir') if args.create_datm: - if not os.path.isdir(os.path.join(args.outdir, dir_output_datm)): - os.mkdir(os.path.join(args.outdir, dir_output_datm)) + if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)): + os.mkdir(os.path.join(args.out_dir, dir_output_datm)) print("dir_input_datm : ", dir_input_datm) - print("dir_output_datm : ", os.path.join(args.outdir, dir_output_datm)) + print("dir_output_datm : ", os.path.join(args.out_dir, dir_output_datm)) # -- Set up user mods directories and base files if args.create_user_mods: @@ -566,8 +566,10 @@ def main(): # Default files dir_inputdata = defaults.get('main', 'clmforcingindir') + dir_inputsurf = defaults.get('surfdat', 'dir') + dir_inputluse = defaults.get('landuse', 'dir') fdomain_in = os.path.join(dir_inputdata, defaults.get('domain', 'file')) - fdatmdomain_in = os.path.join(defaults.get('datm_gswp3', 'dir'), defaults.get('datm_gswp3', 'domain')) + fdatmdomain_in = os.path.join(dir_input_datm, defaults.get('datm_gswp3', 'domain')) datm_solardir = defaults.get('datm_gswp3', 'solardir') datm_precdir = defaults.get('datm_gswp3', 'precdir') datm_tpqwdir = defaults.get('datm_gswp3', 'tpqwdir') @@ -592,29 +594,29 @@ def main(): args.create_landuse, args.create_datm, args.overwrite_single_pft, - args.dominant_pft, - args.zero_nonveg_landunits, - args.uniform_snowpack, + args.dom_pft, + 
args.zero_nonveg, + args.uni_snow, args.no_saturation_excess, - args.dir_output + args.out_dir ) single_point.create_tag() # -- Create CTSM domain file if single_point.create_domain: # -- Specify land domain file --------------------------------- - single_point.fdomain_in = fdomain_in + single_point.fdomain_in = os.path.join(dir_inputdata, fdomain_in) single_point.fdomain_out = single_point.add_tag_to_filename(fdomain_in, single_point.tag) - print("fdomain_in :", fdomain_in) - print("fdomain_out :", single_point.fdomain_out) + print("fdomain_in :", single_point.fdomain_in) + print("fdomain_out :", os.path.join(single_point.output_dir, single_point.fdomain_out)) single_point.create_domain_at_point() # -- Create CTSM surface data file if single_point.create_surfdata: # -- Specify surface file --------------------------------- - single_point.fsurf_in = fsurf_in + single_point.fsurf_in = os.path.join(dir_inputdata, dir_inputsurf, fsurf_in) single_point.fsurf_out = single_point.create_fileout_name(fsurf_in, single_point.tag) - print("fsurf_in :", fsurf_in) + print("fsurf_in :", single_point.fsurf_in) print("fsurf_out :", single_point.fsurf_out) single_point.create_surfdata_at_point() @@ -628,9 +630,9 @@ def main(): # -- Create CTSM transient landuse data file if single_point.create_landuse: # -- Specify surface file --------------------------------- - single_point.fluse_in = fluse_in + single_point.fluse_in = os.path.join(dir_inputdata, dir_inputluse, fluse_in) single_point.fluse_out = single_point.create_fileout_name(fluse_in, single_point.tag) - print("fluse_in :", fluse_in) + print("fluse_in :", single_point.fluse_in) print("fluse_out :", single_point.fluse_out) single_point.create_landuse_at_point() @@ -644,15 +646,17 @@ def main(): # -- Create single point atmospheric forcing data if single_point.create_datm: # -- Specify datm and subset domain file --------------------------------- - single_point.fdatmdomain_in = fdatmdomain_in - single_point.fdatmdomain_out = os.path.join(dir_output_datm, single_point.add_tag_to_filename(fdatmdomain_in, single_point.tag)) - print("fdatmdomain_in : ", fdatmdomain_in) - print("fdatmdomain out : ", fdatmdomain_out) + single_point.fdatmdomain_in = os.path.join(dir_input_datm, fdatmdomain_in) + single_point.fdatmdomain_out = os.path.join(dir_output_datm, + single_point.add_tag_to_filename(single_point.fdatmdomain_in, + single_point.tag)) + print("fdatmdomain_in : ", single_point.fdatmdomain_in) + print("fdatmdomain out : ", single_point.fdatmdomain_out) single_point.create_datmdomain_at_point() # -- Specify DATM directories, tags, and stream names - single_point.datm_syr = datm_syr - single_point.datm_eyr = datm_eyr + single_point.datm_syr = args.datm_syr + single_point.datm_eyr = args.datm_eyr single_point.dir_input_datm = dir_input_datm single_point.dir_output_datm = dir_output_datm single_point.dir_solar = datm_solardir @@ -700,16 +704,16 @@ def main(): print(timetag) # -- Specify land domain file --------------------------------- - fdomain_out = (dir_output + "domain.lnd.fv1.9x2.5_gx1v7." + region.tag + "_170518.nc") + fdomain_out = os.path.join(args.out_dir, "domain.lnd.fv1.9x2.5_gx1v7." 
+ region.tag + "_170518.nc") # SinglePointCase.set_fdomain (fdomain) region.fdomain_in = fdomain_in region.fdomain_out = fdomain_out # -- Specify surface data file -------------------------------- - fsurf_out = ( - dir_output - + "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" + fsurf_out = os.path.join( + args.out_dir, + "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" + region.tag + "_c170824.nc" ) @@ -717,9 +721,9 @@ def main(): region.fsurf_out = fsurf_out # -- Specify landuse file ------------------------------------- - fluse_out = ( - dir_output - + "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" + fluse_out = os.path.join( + args.out_dir, + "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" + region.tag + ".c170824.nc" ) @@ -727,15 +731,15 @@ def main(): region.fluse_out = fluse_out # -- Create CTSM domain file - if create_domain: + if region.create_domain: region.create_domain_at_reg() # -- Create CTSM surface data file - if create_surfdata: + if region.create_surfdata: region.create_surfdata_at_reg() # -- Create CTSM transient landuse data file - if create_landuse: + if region.create_landuse: region.create_landuse_at_reg() print("Successfully ran script for a regional case.") exit() From db56dc53ab3c487eb9c18ced92cf69a064b37e6e Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 9 Dec 2021 18:06:15 -0700 Subject: [PATCH 029/223] cleaning up the parsers. --- python/ctsm/subset_data.py | 284 ++++++++++++++----------------------- 1 file changed, 108 insertions(+), 176 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 50050616d9..bf8e0c1b36 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -86,6 +86,7 @@ import numpy as np import xarray as xr +import textwrap from datetime import date from getpass import getuser @@ -105,7 +106,6 @@ logger = logging.getLogger(__name__) -myname = getuser() def get_parser(): """ @@ -150,87 +150,6 @@ def get_parser(): type=str, default="", ) - pt_parser.add_argument( - "--create-domain", - help="Flag for creating CLM domain file at single point. [default: %(default)s]", - action="store", - dest="create_domain", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - pt_parser.add_argument( - "--create-surface", - help="Flag for creating surface data file at single point. [default: %(default)s]", - action="store", - dest="create_surfdata", - type=str2bool, - nargs="?", - const=True, - required=False, - default=True, - ) - pt_parser.add_argument( - "--create-landuse", - help="Flag for creating landuse data file at single point. [default: %(default)s]", - action="store", - dest="create_landuse", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - pt_parser.add_argument( - "--create-datm", - help="Flag for creating DATM forcing data at single point. [default: %(default)s]", - action="store", - dest="create_datm", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - pt_parser.add_argument( - "--datm-syr", - help="Start year for creating DATM forcing at single point. [default: %(default)s]", - action="store", - dest="datm_syr", - required=False, - type=int, - default=1901, - ) - pt_parser.add_argument( - "--datm-eyr", - help="End year for creating DATM forcing at single point. 
[default: %(default)s]", - action="store", - dest="datm_eyr", - required=False, - type=int, - default=2014, - ) - pt_parser.add_argument( - "--crop", - help="Flag for creating datasets using the extensive list of prognostic crop types. [default: %(default)s]", - action="store", - dest="crop_flag", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - pt_parser.add_argument( - "--dompft", - help="Dominant PFT type . [default: %(default)s] ", - action="store", - dest="dom_pft", - type=int, - default=7, - ) pt_parser.add_argument( "--unisnow", help="Flag for creating datasets using uniform snowpack. [default: %(default)s]", @@ -275,15 +194,6 @@ def get_parser(): required=False, default=True, ) - pt_parser.add_argument( - "--outdir", - help="Output directory. [default: %(default)s]", - action="store", - dest="out_dir", - type=str, - default="/glade/scratch/" + myname + "/single_point/", - ) - # -- region-specific parser options rg_parser.add_argument( "--lat1", @@ -330,92 +240,112 @@ def get_parser(): type=str, default="", ) - rg_parser.add_argument( - "--create-domain", - help="Flag for creating CLM domain file for a region. [default: %(default)s]", - action="store", - dest="create_domain", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - rg_parser.add_argument( - "--create-surface", - help="Flag for creating surface data file for a region. [default: %(default)s]", - action="store", - dest="create_surfdata", - type=str2bool, - nargs="?", - const=True, - required=False, - default=True, - ) - rg_parser.add_argument( - "--create-landuse", - help="Flag for creating landuse data file for a region. [default: %(default)s]", - action="store", - dest="create_landuse", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - rg_parser.add_argument( - "--create-datm", - help="Flag for creating DATM forcing data for a region. [default: %(default)s]", - action="store", - dest="create_datm", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - rg_parser.add_argument( - "--datm-syr", - help="Start year for creating DATM forcing for a region. [default: %(default)s]", - action="store", - dest="datm_syr", - required=False, - type=int, - default=1901, - ) - rg_parser.add_argument( - "--datm-eyr", - help="End year for creating DATM forcing for a region. [default: %(default)s]", - action="store", - dest="datm_eyr", - required=False, - type=int, - default=2014, - ) - rg_parser.add_argument( - "--crop", - help="Create datasets using the extensive list of prognostic crop types. [default: %(default)s]", - action="store_true", - dest="crop_flag", - default=False, - ) - rg_parser.add_argument( - "--dompft", - help="Dominant PFT type . [default: %(default)s] ", - action="store", - dest="dom_pft", - type=int, - default=7, - ) - rg_parser.add_argument( - "--outdir", - help="Output directory. [default: %(default)s]", - action="store", - dest="out_dir", - type=str, - default="/glade/scratch/" + myname + "/regional/", - ) + # -- common options between both subparsers + for subparser in [pt_parser, rg_parser]: + subparser.add_argument( + "--create-domain", + help="Flag for creating CLM domain file at single point/region. [default: %(default)s]", + action="store", + dest="create_domain", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + subparser.add_argument( + "--create-surface", + help="Flag for creating surface data file at single point/region. 
[default: %(default)s]", + action="store", + dest="create_surfdata", + type=str2bool, + nargs="?", + const=True, + required=False, + default=True, + ) + subparser.add_argument( + "--create-landuse", + help="Flag for creating landuse data file at single point/region. [default: %(default)s]", + action="store", + dest="create_landuse", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + subparser.add_argument( + "--create-datm", + help="Flag for creating DATM forcing data at single point/region. [default: %(default)s]", + action="store", + dest="create_datm", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + subparser.add_argument( + "--datm-syr", + help="Start year for creating DATM forcing at single point/region. [default: %(default)s]", + action="store", + dest="datm_syr", + required=False, + type=int, + default=1901, + ) + subparser.add_argument( + "--datm-eyr", + help="End year for creating DATM forcing at single point/region. [default: %(default)s]", + action="store", + dest="datm_eyr", + required=False, + type=int, + default=2014, + ) + subparser.add_argument( + "--crop", + help="Flag for creating datasets using the extensive list of prognostic crop types. [default: %(default)s]", + action="store", + dest="crop_flag", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + subparser.add_argument( + "--dompft", + help="Dominant PFT type . [default: %(default)s] ", + action="store", + dest="dom_pft", + type=int, + default=7, + ) + + if subparser == pt_parser: + parser_name = "single_point" + else: + parser_name = "regional" + + subparser.add_argument( + "--outdir", + help="Output directory. \n [default: %(default)s]", + action="store", + dest="out_dir", + type=str, + default=os.path.join(os.getcwd(), "subset_data_" + parser_name), + ) + + # -- print help for both subparsers + parser.epilog = textwrap.dedent( + f"""\ + {pt_parser.format_help()} + {rg_parser.format_help()} + """ + ) return parser @@ -531,6 +461,8 @@ def main(): today = date.today() today_string = today.strftime("%Y%m%d") + myname = getuser() + pwd = os.getcwd() # log_file = os.path.join(pwd, today_string + ".log") From 0ffa6e92303a7933b3fc70b287f75b4c96be38f0 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Fri, 10 Dec 2021 10:50:21 -0700 Subject: [PATCH 030/223] updates to make things more pythonic --- python/ctsm/site_and_regional/base_case.py | 39 +-- .../ctsm/site_and_regional/regional_case.py | 97 +++++-- .../site_and_regional/single_point_case.py | 260 +++++++++++------- python/ctsm/subset_data.py | 232 +++++++--------- 4 files changed, 358 insertions(+), 270 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index d30ff1346a..a97f4bd9dd 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -1,12 +1,14 @@ import os +import subprocess + import numpy as np import xarray as xr -import subprocess from datetime import date from getpass import getuser myname = getuser() +USRDAT_DIR = "CLM_USRDAT_DIR" class BaseCase: @@ -23,6 +25,8 @@ class BaseCase: flag for creating landuse file create_datm : bool flag for creating DATM files + create_user_mods + flag for creating a user_mods directory Methods ------- create_1d_coord(filename, lon_varname , lat_varname,x_dim , y_dim ) @@ -30,25 +34,20 @@ class BaseCase: add_tag_to_filename(filename, tag) add a tag and timetag to a filename ending with [._]cYYMMDD.nc or 
[._]YYMMDD.nc + update_metadata(self, nc) + updates metadata for a netcdf file and removes attributes that should not be there """ - def __init__(self, create_domain, create_surfdata, create_landuse, create_datm): + def __init__(self, create_domain, create_surfdata, create_landuse, create_datm, create_user_mods): self.create_domain = create_domain self.create_surfdata = create_surfdata self.create_landuse = create_landuse self.create_datm = create_datm + self.create_user_mods = create_user_mods def __str__(self): - return ( - str(self.__class__) - + "\n" - + "\n".join( - ( - str(item) + " = " + str(self.__dict__[item]) - for item in sorted(self.__dict__) - ) - ) - ) + return "{}\n{}".format(str(self.__class__), "\n".join( + ("{} = {}".format(str(key), str(self.__dict__[key])) for key in sorted(self.__dict__)))) @staticmethod def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): @@ -128,15 +127,17 @@ def update_metadata(self, nc): @staticmethod def get_git_sha(): """ - Returns Git short SHA for the currect directory. + Returns Git short SHA for the current directory. """ try: - sha = ( - subprocess.check_output( - ["git", "rev-parse", "--short", "HEAD"]) - .strip() - .decode() - ) + sha = (subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).strip().decode()) except subprocess.CalledProcessError: sha = "NOT-A-GIT-REPOSITORY" return sha + + @staticmethod + def write_to_file(text, file): + """ + Writes text to a file, surrounding text with \n characters + """ + file.write("\n{}\n".format(text)) diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index de50e4a9e5..0099659dd6 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -1,49 +1,90 @@ -from ctsm.site_and_regional.base_case import BaseCase - import numpy as np -import xarray as xr + +from ctsm.site_and_regional.base_case import BaseCase class RegionalCase(BaseCase): """ A case to encapsulate regional cases. + ... + Attributes + ---------- + lat1 : float + start latitude + lat2 : float + end latitude + lon1 : float + start longitude + lon2 : float + end longitude + reg_name: str -- default = None + region name + tag : str + ending tag for output file naming + fluse_out : str + file name of output subset land use file + fluse_in : str + file name of input land use file to subset + fsurf_out : str + file name of output subset surface data file + fsurf_in : str + file name of input surface data to subset + fdomain_out : str + file name of output domain subset domain file + fdomain_in : str + file name of input domain file to subset + + Methods + ------- + create_tag + create a tag for a region which is the region name + or the "lon1-lon2-lat1-lat2" format if the region name does not exist. 
+    create_domain_at_reg
+        Create domain file for a region
+    create_landuse_at_reg:
+        Create landuse file for a region
+    create_surfdata_at_reg:
+        Create surface dataset file for a region
+    create_datmdomain_at_reg:
+        Create DATM domain file for a region
     """

     def __init__(
-        self,
-        lat1,
-        lat2,
-        lon1,
-        lon2,
-        reg_name,
-        create_domain,
-        create_surfdata,
-        create_landuse,
-        create_datm,
+            self,
+            lat1,
+            lat2,
+            lon1,
+            lon2,
+            reg_name,
+            create_domain,
+            create_surfdata,
+            create_landuse,
+            create_datm,
+            create_user_mods,
+            output_dir,
     ):
-        super().__init__(create_domain, create_surfdata, create_landuse, create_datm)
+        super().__init__(create_domain, create_surfdata, create_landuse, create_datm, create_user_mods)
         self.lat1 = lat1
         self.lat2 = lat2
         self.lon1 = lon1
         self.lon2 = lon2
         self.reg_name = reg_name
+        self.output_dir = output_dir
+        self.tag = None
+        self.fluse_out = None
+        self.fluse_in = None
+        self.fsurf_out = None
+        self.fsurf_in = None
+        self.fdomain_out = None
+        self.fdomain_in = None

     def create_tag(self):
         if self.reg_name:
             self.tag = self.reg_name
         else:
-            self.tag = (
-                str(self.lon1)
-                + "-"
-                + str(self.lon2)
-                + "_"
-                + str(self.lat1)
-                + "-"
-                + str(self.lat2)
-            )
+            self.tag = "{}-{}_{}-{}".format(str(self.lon1), str(self.lon2), str(self.lat1), str(self.lat2))

     def create_domain_at_reg(self):
-        # print ("Creating domain file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2)
         print("Creating domain file at region:", self.tag)
         # create 1d coordinate variables to enable sel() method
         f2 = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj")
@@ -66,12 +107,10 @@ def create_domain_at_reg(self):
         f3.close()

     def create_surfdata_at_reg(self):
-        # print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2)
         print("Creating surface dataset file at region:", self.tag)
         # create 1d coordinate variables to enable sel() method
         filename = self.fsurf_in
-        f2 = self.create_1d_coord(
-            filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
+        f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
         lat = f2["lat"]
         lon = f2["lon"]
         # subset longitude and latitude arrays
@@ -91,11 +130,9 @@ def create_surfdata_at_reg(self):
         f3.close()

     def create_landuse_at_reg(self):
-        # print ("Creating surface dataset file at region", self.lon1+"-"+self.lat2,self.lat1+"-"+self.lat2)
         print("Creating landuse file at region:", self.tag)
         # create 1d coordinate variables to enable sel() method
-        f2 = self.create_1d_coord(
-            self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
+        f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
         lat = f2["lat"]
         lon = f2["lon"]
         # subset longitude and latitude arrays
diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py
index ddbd08b2d9..34ba0a05ce 100644
--- a/python/ctsm/site_and_regional/single_point_case.py
+++ b/python/ctsm/site_and_regional/single_point_case.py
@@ -1,9 +1,12 @@
-from ctsm.site_and_regional.base_case import BaseCase
 import os
+
 import numpy as np
 import xarray as xr
+
 from datetime import date

+from ctsm.site_and_regional.base_case import BaseCase, USRDAT_DIR
+

 class SinglePointCase(BaseCase):
     """
@@ -17,12 +20,71 @@ class SinglePointCase(BaseCase):
         longitude
     site_name: str -- default = None
         Site name
+    overwrite_single_pft : bool
+        flag to overwrite surface data with one uniform plant functional type
+    dominant_pft: int
+        index of plant functional type to set to 100% cover if
overwrite_single_pft = True
+    zero_nonveg_landunits : bool
+        flag to set surface data to all natural vegetation (100% NATVEG, 0% other)
+    uniform_snowpack : bool
+        flag to set the surface data STD_ELEV to 20.0
+    no_saturation_excess : bool
+        flag to set the surface data FMAX to 0.0
+    output_dir : str
+        main output directory to write subset files to
+    tag : str
+        ending tag for output file naming
+    fdomain_in : str
+        file name of input domain file to subset
+    fdomain_out : str
+        file name of output subset domain file
+    fluse_in : str
+        file name of input land use file to subset
+    fluse_out : str
+        file name of output subset land use file
+    fsurf_in : str
+        file name of input surface data file to subset
+    fsurf_out : str
+        file name of output subset surface data file
+    fdatmdomain_in : str
+        file name of input DATM domain file to subset
+    fdatmdomain_out : str
+        file name of output subset DATM domain file
+    datm_syr : int
+        starting year for subset DATM data
+    datm_eyr : int
+        ending year for subset DATM data
+    dir_tpqw : str
+        input directory for TPQW DATM data
+    dir_prec : str
+        input directory for precipitation DATM data
+    dir_solar : str
+        input directory for solar DATM data
+    tag_tpqw : str
+        tag (file naming convention) for input TPQW DATM data
+    tag_prec : str
+        tag (file naming convention) for input precipitation DATM data
+    tag_solar : str
+        tag (file naming convention) for input solar DATM data
+    name_tpqw : str
+        stream name for TPQW DATM data
+    name_prec : str
+        stream name for precipitation DATM data
+    name_solar : str
+        stream name for solar DATM data
+    dir_output_datm : str
+        directory to write subset DATM data to (default to within main output directory)
+    datm_streams_file : str
+        file name of user_nl_datm_streams file to write to for user_mods creation
+
     Methods
     -------
-    create_tag
+    create_tag:
         create a tag for single point which is the site name
         or the "lon-lat" format if the site name does not exist.
-    create_domain_at_point
+    create_fileout_name:
+        creates a file name from a basename and a specified tag
+    create_domain_at_point:
         Create domain file at a single point.
     create_landuse_at_point:
         Create landuse file at a single point.
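As a rough standalone sketch of the naming conventions these methods implement (illustrative only; the basename below is borrowed from the surface-dataset defaults used elsewhere in this series, and the printed date depends on the day it is run):

    from datetime import date

    # create_tag: use the site name when given, otherwise fall back to "lon_lat"
    site_name = None
    plon, plat = 287.8, 42.5
    tag = site_name if site_name else "{}_{}".format(plon, plat)  # -> "287.8_42.5"

    # create_fileout_name: drop the resolution field (items[1]) from the basename
    # and append the tag plus a fresh creation-date suffix
    basename = "surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc"
    items = basename.split("_")
    today_string = date.today().strftime("%y%m%d")
    print("{}_{}_c{}.nc".format("_".join([items[0]] + items[2:7]), tag, today_string))
    # -> surfdata_hist_16pfts_Irrig_CMIP6_simyr2000_287.8_42.5_c<YYMMDD>.nc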
@@ -33,22 +95,23 @@ class SinglePointCase(BaseCase): """ def __init__( - self, - plat, - plon, - site_name, - create_domain, - create_surfdata, - create_landuse, - create_datm, - overwrite_single_pft, - dominant_pft, - zero_nonveg_landunits, - uniform_snowpack, - no_saturation_excess, - output_dir + self, + plat, + plon, + site_name, + create_domain, + create_surfdata, + create_landuse, + create_datm, + create_user_mods, + overwrite_single_pft, + dominant_pft, + zero_nonveg_landunits, + uniform_snowpack, + no_saturation_excess, + output_dir, ): - super().__init__(create_domain, create_surfdata, create_landuse, create_datm) + super().__init__(create_domain, create_surfdata, create_landuse, create_datm, create_user_mods) self.plat = plat self.plon = plon self.site_name = site_name @@ -58,12 +121,35 @@ def __init__( self.uniform_snowpack = uniform_snowpack self.no_saturation_excess = no_saturation_excess self.output_dir = output_dir + self.tag = None + self.fdomain_in = None + self.fdomain_out = None + self.fluse_in = None + self.fluse_out = None + self.fsurf_in = None + self.fsurf_out = None + self.fdatmdomain_in = None + self.fdatmdomain_out = None + self.datm_syr = None + self.datm_eyr = None + self.name_tpqw = None + self.name_prec = None + self.name_solar = None + self.dir_output_datm = None + self.dir_tpqw = None + self.tag_tpqw = None + self.dir_prec = None + self.tag_prec = None + self.dir_input_datm = None + self.tag_solar = None + self.dir_solar = None + self.datm_streams_file = None def create_tag(self): if self.site_name: self.tag = self.site_name else: - self.tag = str(self.plon) + "_" + str(self.plat) + self.tag = "{}_{}".format(str(self.plon), str(self.plat)) @staticmethod def create_fileout_name(filename, tag): @@ -72,24 +158,8 @@ def create_fileout_name(filename, tag): items = basename.split("_") today = date.today() today_string = today.strftime("%y%m%d") - new_string = ( - items[0] - + "_" - + items[2] - + "_" - + items[3] - + "_" - + items[4] - + "_" - + items[5] - + "_" - + items[6] - + "_" - + tag - + "_c" - + today_string - + ".nc" - ) + new_string = "{}_{}_c{}.nc".format("_".join([items[0]] + items[2:7]), tag, today_string) + return new_string def create_domain_at_point(self): @@ -108,16 +178,15 @@ def create_domain_at_point(self): wfile = os.path.join(self.output_dir, self.fdomain_out) f3.to_netcdf(path=wfile, mode="w") - print("Successfully created file (fdomain_out)" + self.fdomain_out) + print("Successfully created file (fdomain_out) " + self.fdomain_out) f2.close() f3.close() def create_landuse_at_point(self): print("----------------------------------------------------------------------") - print("Creating landuse file at ", self.plon, self.plat, ".") + print("Creating landuse file at ", self.plon, self.plat) # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord( - self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat f3 = f2.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") @@ -130,8 +199,7 @@ def create_landuse_at_point(self): # revert expand dimensions of YEAR year = np.squeeze(np.asarray(f3["YEAR"])) - x = xr.DataArray( - year, coords={"time": f3["time"]}, dims="time", name="YEAR") + x = xr.DataArray(year, coords={"time": f3["time"]}, dims="time", name="YEAR") x.attrs["units"] = "unitless" x.attrs["long_name"] = "Year of PFT data" f3["YEAR"] = x @@ -143,25 +211,26 @@ def create_landuse_at_point(self): 
wfile = os.path.join(self.output_dir, self.fluse_out) # mode 'w' overwrites file f3.to_netcdf(path=wfile, mode="w") - print("Successfully created file (luse_out)" + self.fluse_out, ".") + print("Successfully created file (fluse_out) " + self.fluse_out) f2.close() f3.close() def create_surfdata_at_point(self): print("----------------------------------------------------------------------") - print("Creating surface dataset file at ", self.plon, self.plat, ".") + print("Creating surface dataset file at ", self.plon, self.plat) # create 1d coordinate variables to enable sel() method filename = os.path.join(self.output_dir, self.fsurf_in) - f2 = self.create_1d_coord( - filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat f3 = f2.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") # expand dimensions f3 = f3.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) # update the plon and plat to match the surface data - self.plat = f3.coords['lsmlat'].values[0] - self.plon = f3.coords['lsmlon'].values[0] + # we do this so that if we create user_mods the PTS_LON and PTS_LAT in CIME match + # the surface data coordinates - which is required + self.plat = f3.coords["lsmlat"].values[0] + self.plon = f3.coords["lsmlon"].values[0] # modify surface data properties if self.overwrite_single_pft: @@ -172,10 +241,7 @@ def create_surfdata_at_point(self): f3["PCT_CROP"][:, :] = 0 f3["PCT_LAKE"][:, :] = 0.0 f3["PCT_WETLAND"][:, :] = 0.0 - f3["PCT_URBAN"][ - :, - :, - ] = 0.0 + f3["PCT_URBAN"][:, :, ] = 0.0 f3["PCT_GLACIER"][:, :] = 0.0 if self.uniform_snowpack: f3["STD_ELEV"][:, :] = 20.0 @@ -205,16 +271,15 @@ def create_surfdata_at_point(self): del f3.attrs["History_Log"] # mode 'w' overwrites file f3.to_netcdf(path=self.fsurf_out, mode="w") - print("Successfully created file (fsurf_out) :" + self.fsurf_out) + print("Successfully created file (fsurf_out): " + self.fsurf_out) f2.close() f3.close() def create_datmdomain_at_point(self): print("----------------------------------------------------------------------") - print("Creating DATM domain file at ", self.plon, self.plat, ".") + print("Creating DATM domain file at ", self.plon, self.plat) # create 1d coordinate variables to enable sel() method - filename = self.fdatmdomain_in - f2 = self.create_1d_coord(filename, "xc", "yc", "ni", "nj") + f2 = self.create_1d_coord(self.fdatmdomain_in, "xc", "yc", "ni", "nj") # extract gridcell closest to plon/plat f3 = f2.sel(ni=self.plon, nj=self.plat, method="nearest") # expand dimensions @@ -225,7 +290,7 @@ def create_datmdomain_at_point(self): f3.attrs["Created_from"] = self.fdatmdomain_in # mode 'w' overwrites file f3.to_netcdf(path=wfile, mode="w") - print("Successfully created file (fdatmdomain_out) :" + self.fdatmdomain_out) + print("Successfully created file (fdatmdomain_out): " + self.fdatmdomain_out) f2.close() f3.close() @@ -244,28 +309,39 @@ def extract_datm_at(self, file_in, file_out): f3.attrs["Created_from"] = file_in # mode 'w' overwrites file f3.to_netcdf(path=file_out, mode="w") - print("Successfully created file :" + file_out) + print("Successfully created file: " + file_out) f2.close() f3.close() def write_shell_commands(self, file): - # writes out shell commands for single-point runs - - file.write('! 
Change below line if you move the subset data directory')
-        file.write('\n' + './xmlchange CLM_USRDAT_DIR=' + self.out_dir + '\n')
-        file.write('\n' + "./xmlchange PTS_LON=" + str(self.plon) + '\n')
-        file.write('\n' + "./xmlchange PTS_LAT=" + str(self.plat) + '\n')
-        file.write('\n' + "./xmlchange MPILIB=mpi-serial" + '\n')
+        """
+        writes out xml commands to a file (i.e. shell_commands) for single-point runs
+
+        file - file connection to shell_commands file
+        """
+
+        # write_to_file surrounds text with newlines
+        self.write_to_file("! Change below line if you move the subset data directory", file)
+        self.write_to_file("./xmlchange {}={}".format(USRDAT_DIR, self.output_dir), file)
+        self.write_to_file("./xmlchange PTS_LON={}".format(str(self.plon)), file)
+        self.write_to_file("./xmlchange PTS_LAT={}".format(str(self.plat)), file)
+        self.write_to_file("./xmlchange MPILIB=mpi-serial", file)
         file.close()

-    def write_nl_commands(self, streamname, file):
-        line_mapalgo = streamname + ':mapalgo=none'
-        line_meshfile = streamname + ':meshfile=none'
+    def write_datm_streams_lines(self, streamname, datmfiles, file):
+        """
+        writes out lines for the user_nl_datm_streams file for a specific DATM stream
+        for using subset DATM data at a single point

-        file.write("\n" + line_meshfile + "\n")
-        file.write("\n" + line_mapalgo + "\n")
+        streamname - stream name (e.g. TPQW)
+        datmfiles - comma-separated list (str) of DATM file names
+        file - file connection to user_nl_datm_streams file
+        """
+        self.write_to_file("{}:datafiles={}".format(streamname, ','.join(datmfiles)), file)
+        self.write_to_file("{}:mapalgo=none".format(streamname), file)
+        self.write_to_file("{}:meshfile=none".format(streamname), file)

-    def create_datm_at_point(self, create_user_mods, datm_streams_file):
+    def create_datm_at_point(self):
         print("----------------------------------------------------------------------")
         print("Creating DATM files at ", self.plon, self.plat, ".")

             dtag = ystr + "-" + mstr

-            fsolar = os.path.join(self.dir_solar, self.tag_solar + dtag + ".nc")
-            fsolar2 = os.path.join(self.tag_solar + self.tag + "." + dtag + ".nc")
-            fprecip = os.path.join(self.dir_input_datm, self.dir_prec, self.tag_prec + dtag + ".nc")
-            fprecip2 = os.path.join(self.tag_prec + self.tag + "." + dtag + ".nc")
-            ftpqw = os.path.join(self.dir_input_datm, self.dir_tpqw, self.tag_tpqw + dtag + ".nc")
-            ftpqw2 = os.path.join(self.tag_tpqw + self.tag + "."
+ dtag + ".nc") + fsolar = os.path.join(self.dir_input_datm, self.dir_solar, "{}{}.nc".format(self.tag_solar, dtag)) + fsolar2 = "{}{}.{}.nc".format(self.tag_solar, self.tag, dtag) + fprecip = os.path.join(self.dir_input_datm, self.dir_solar, "{}{}.nc".format(self.tag_prec, dtag)) + fprecip2 = "{}{}.{}.nc".format(self.tag_prec, self.tag, dtag) + ftpqw = os.path.join(self.dir_input_datm, self.dir_tpqw, "{}{}.nc".format(self.tag_tpqw, dtag)) + ftpqw2 = "{}{}.{}.nc".format(self.tag_tpqw, self.tag, dtag) outdir = os.path.join(self.output_dir, self.dir_output_datm) infile += [fsolar, fprecip, ftpqw] - outfile += [os.path.join(outdir, fsolar2), os.path.join(outdir, fprecip2), + outfile += [os.path.join(outdir, fsolar2), + os.path.join(outdir, fprecip2), os.path.join(outdir, ftpqw2)] - solarfiles.append(os.path.join("$CLM_USRDAT_DIR", self.dir_output_dtam, fsolar2)) - precfiles.append(os.path.join("$CLM_USRDAT_DIR", self.dir_output_dtam, fprecip2)) - tpqwfiles.append(os.path.join("$CLM_USRDAT_DIR", self.dir_output_dtam, ftpqw2)) + solarfiles.append(os.path.join("${}".format(USRDAT_DIR), self.dir_output_datm, fsolar2)) + precfiles.append(os.path.join("${}".format(USRDAT_DIR), self.dir_output_datm, fprecip2)) + tpqwfiles.append(os.path.join("${}".format(USRDAT_DIR), self.dir_output_datm, ftpqw2)) nm = len(infile) for n in range(nm): @@ -306,18 +383,11 @@ def create_datm_at_point(self, create_user_mods, datm_streams_file): file_out = outfile[n] self.extract_datm_at(file_in, file_out) - print("All DATM files are created in: " + outdir) + print("All DATM files are created in: " + self.output_dir) # write to user_nl_datm_streams if specified - if create_user_mods: - solarfile_line = self.name_solar + ':datafiles=' + ','.join(solarfiles) - precfile_line = self.name_prec + ':datafiles=' + ','.join(precfiles) - tpqwfile_line = self.name_tpqw + ':datafiles=' + ','.join(tpqwfiles) - - with open(datm_streams_file, 'a') as user_nl_file: - user_nl_file.write('\n' + solarfile_line + '\n') - self.write_nl_commands(self.name_solar, user_nl_file) - user_nl_file.write('\n' + precfile_line + '\n') - self.write_nl_commands(self.name_prec, user_nl_file) - user_nl_file.write('\n' + tpqwfile_line + '\n') - self.write_nl_commands(self.name_tpqw, user_nl_file) + if self.create_user_mods: + with open(self.datm_streams_file, "a") as file: + self.write_datm_streams_lines(self.name_solar, solarfiles, file) + self.write_datm_streams_lines(self.name_prec, precfiles, file) + self.write_datm_streams_lines(self.name_tpqw, tpqwfiles, file) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 91cc3940d2..f498139430 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -41,31 +41,24 @@ # Import libraries from __future__ import print_function -from ctsm.site_and_regional.regional_case import RegionalCase -from ctsm.site_and_regional.single_point_case import SinglePointCase -from ctsm.site_and_regional.base_case import BaseCase -from ctsm.path_utils import path_to_ctsm_root -from ctsm import add_cime_to_path - import sys import os -import string import logging import subprocess import argparse import configparser -import numpy as np -import xarray as xr - from datetime import date from getpass import getuser from logging.handlers import RotatingFileHandler -from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter +from argparse import ArgumentParser + +from ctsm.site_and_regional.base_case import USRDAT_DIR +from ctsm.site_and_regional.regional_case import RegionalCase +from 
ctsm.site_and_regional.single_point_case import SinglePointCase
+from ctsm.path_utils import path_to_ctsm_root

-# Get the ctsm util tools and then the cime tools.
-_CTSM_PYTHON = os.path.abspath(os.path.join(
-    os.path.dirname(__file__), "..", "..", 'python'))
+_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", 'python'))
 sys.path.insert(1, _CTSM_PYTHON)

 # -- Globals and Default Values ---
@@ -331,9 +324,7 @@ def str2bool(v):
     elif v.lower() in ("no", "false", "f", "n", "0"):
         return False
     else:
-        raise argparse.ArgumentTypeError(
-            "Boolean value expected. [true or false] or [y or n]"
-        )
+        raise argparse.ArgumentTypeError("Boolean value expected. [true or false] or [y or n]")


 def plat_type(x):
@@ -350,9 +341,7 @@ def plat_type(x):
     """
     x = float(x)
     if (x < -90) or (x > 90):
-        raise argparse.ArgumentTypeError(
-            "ERROR: Latitude should be between -90 and 90."
-        )
+        raise argparse.ArgumentTypeError("ERROR: Latitude should be between -90 and 90.")
     return x


@@ -370,13 +359,11 @@ def plon_type(x):
     """
     x = float(x)
     if (-180 < x) and (x < 0):
-        print("lon is :", x)
+        print("lon is: ", x)
         x = x % 360
-        print("after modulo lon is :", x)
+        print("after modulo lon is: ", x)
     if (x < 0) or (x > 360):
-        raise argparse.ArgumentTypeError(
-            "ERROR: Latitude of single point should be between 0 and 360 or -180 and 180."
-        )
+        raise argparse.ArgumentTypeError("ERROR: Longitude of single point should be between 0 and 360 or -180 and 180.")
     return x


@@ -387,11 +374,7 @@ def get_git_sha():
     try:

         # os.abspath(__file__)
-        sha = (
-            subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
-            .strip()
-            .decode()
-        )
+        sha = (subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).strip().decode())
     except subprocess.CalledProcessError:
         sha = "NOT-A-GIT-REPOSITORY"
     return sha
@@ -407,14 +390,9 @@ def setup_logging(log_file, log_level):

     # setup log file
     one_mb = 1000000
-    handler = logging.handlers.RotatingFileHandler(
-        log_file, maxBytes=one_mb, backupCount=10
-    )
+    handler = logging.handlers.RotatingFileHandler(log_file, maxBytes=one_mb, backupCount=10)

-    fmt = logging.Formatter(
-        "%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
-        datefmt="%y-%m-%d %H:%M:%S",
-    )
+    fmt = logging.Formatter("%(asctime)s %(name)-12s %(levelname)-8s %(message)s", datefmt="%y-%m-%d %H:%M:%S")
     handler.setFormatter(fmt)
     root_logger.addHandler(handler)
@@ -482,8 +460,7 @@ def flush(self):


 def main():
-
-    # parse arguments
+    # parse command-line arguments
     args = get_parser().parse_args()

     # parse defaults file
@@ -516,29 +493,29 @@ def main():
     # data file
     if args.crop_flag:
         num_pft = "78"
-        fsurf_in = defaults.get('surfdat', 'surfdat_78pft')
-        fluse_in = defaults.get('landuse', 'landuse_78pft')
+        fsurf_in = defaults.get("surfdat", "surfdat_78pft")
+        fluse_in = defaults.get("landuse", "landuse_78pft")
     else:
         num_pft = "16"
-        fsurf_in = defaults.get('surfdat', 'surfdat_16pft')
-        fluse_in = defaults.get('landuse', 'landuse_16pft')
+        fsurf_in = defaults.get("surfdat", "surfdat_16pft")
+        fluse_in = defaults.get("landuse", "landuse_16pft")

-    print("crop_flag = " + args.crop_flag.__str__() + " => num_pft =" + num_pft)
+    print("crop_flag = {} => num_pft = {}".format(args.crop_flag.__str__(), num_pft))

     # -- Specify input and output directories and files

-    # top-level output directory
+    # Top-level output directory
     if not os.path.isdir(args.out_dir):
         os.mkdir(args.out_dir)

-    # datm data
+    # DATM data
     dir_output_datm = "datmdata"
-    dir_input_datm = defaults.get('datm_gswp3', 'dir')
+    dir_input_datm =
defaults.get("datm_gswp3", "dir") if args.create_datm: if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)): os.mkdir(os.path.join(args.out_dir, dir_output_datm)) - print("dir_input_datm : ", dir_input_datm) - print("dir_output_datm : ", os.path.join(args.out_dir, dir_output_datm)) + print("dir_input_datm : ", dir_input_datm) + print("dir_output_datm: ", os.path.join(args.out_dir, dir_output_datm)) # -- Set up user mods directories and base files if args.create_user_mods: @@ -551,38 +528,37 @@ def main(): if args.create_surfdata or args.create_landuse: nl_clm_base = os.path.join(cesmroot, "cime_config/user_nl_clm") nl_clm = os.path.join(args.user_mods_dir, "user_nl_clm") - with open(nl_clm_base, 'r') as basefile, open(nl_clm, 'w') as userfile: + with open(nl_clm_base, "r") as basefile, open(nl_clm, "w") as user_file: for line in basefile: - userfile.write(line) + user_file.write(line) # -- Create empty user_nl_datm_streams file if args.create_datm: - nl_datm_base = os.path.join( - cesmroot, "components/cdeps/datm/cime_config/user_nl_datm_streams") + nl_datm_base = os.path.join(cesmroot, "components/cdeps/datm/cime_config/user_nl_datm_streams") nl_datm = os.path.join(args.user_mods_dir, "user_nl_datm_streams") - with open(nl_datm_base, 'r') as basefile, open(nl_datm, 'w') as userfile: - for line in basefile: - userfile.write(line) + with open(nl_datm_base, "r") as base_file, open(nl_datm, 'w') as user_file: + for line in base_file: + user_file.write(line) # Default files - dir_inputdata = defaults.get('main', 'clmforcingindir') - dir_inputsurf = defaults.get('surfdat', 'dir') - dir_inputluse = defaults.get('landuse', 'dir') - fdomain_in = os.path.join(dir_inputdata, defaults.get('domain', 'file')) - fdatmdomain_in = os.path.join(dir_input_datm, defaults.get('datm_gswp3', 'domain')) - datm_solardir = defaults.get('datm_gswp3', 'solardir') - datm_precdir = defaults.get('datm_gswp3', 'precdir') - datm_tpqwdir = defaults.get('datm_gswp3', 'tpqwdir') - datm_solartag = defaults.get('datm_gswp3', 'solartag') - datm_prectag = defaults.get('datm_gswp3', 'prectag') - datm_tpqwtag = defaults.get('datm_gswp3', 'tpqwtag') - datm_solarname = defaults.get('datm_gswp3', 'solarname') - datm_precname = defaults.get('datm_gswp3', 'precname') - datm_tpqwname = defaults.get('datm_gswp3', 'tpqwname') + dir_inputdata = defaults.get("main", "clmforcingindir") + dir_inputsurf = defaults.get("surfdat", "dir") + dir_inputluse = defaults.get("landuse", "dir") + fdomain_in = os.path.join(dir_inputdata, defaults.get("domain", "file")) + fdatmdomain_in = os.path.join(dir_input_datm, defaults.get("datm_gswp3", "domain")) + datm_solardir = defaults.get("datm_gswp3", "solardir") + datm_precdir = defaults.get("datm_gswp3", "precdir") + datm_tpqwdir = defaults.get("datm_gswp3", "tpqwdir") + datm_solartag = defaults.get("datm_gswp3", "solartag") + datm_prectag = defaults.get("datm_gswp3", "prectag") + datm_tpqwtag = defaults.get("datm_gswp3", "tpqwtag") + datm_solarname = defaults.get("datm_gswp3", "solarname") + datm_precname = defaults.get("datm_gswp3", "precname") + datm_tpqwname = defaults.get("datm_gswp3", "tpqwname") if args.run_type == "point": print("----------------------------------------------------------------------------") - print( "This script extracts a single point from the global CTSM datasets.") + print("This script extracts a single point from the global CTSM datasets.") # -- Create SinglePoint Object single_point = SinglePointCase( @@ -593,22 +569,26 @@ def main(): args.create_surfdata, 
args.create_landuse,
         args.create_datm,
+        args.create_user_mods,
         args.overwrite_single_pft,
         args.dom_pft,
         args.zero_nonveg,
         args.uni_snow,
         args.no_saturation_excess,
-        args.out_dir
+        args.out_dir,
     )

     single_point.create_tag()

+    if single_point.create_user_mods and single_point.create_datm:
+        single_point.datm_streams_file = nl_datm
+
     # -- Create CTSM domain file
     if single_point.create_domain:
         # -- Specify land domain file ---------------------------------
         single_point.fdomain_in = os.path.join(dir_inputdata, fdomain_in)
         single_point.fdomain_out = single_point.add_tag_to_filename(fdomain_in, single_point.tag)
-        print("fdomain_in :", single_point.fdomain_in)
-        print("fdomain_out :", os.path.join(single_point.output_dir, single_point.fdomain_out))
+        print("fdomain_in: ", single_point.fdomain_in)
+        print("fdomain_out: ", os.path.join(single_point.output_dir, single_point.fdomain_out))
         single_point.create_domain_at_point()

     # -- Create CTSM surface data file
     if single_point.create_surfdata:
         # -- Specify surface file ---------------------------------
         single_point.fsurf_in = os.path.join(dir_inputdata, dir_inputsurf, fsurf_in)
         single_point.fsurf_out = single_point.create_fileout_name(fsurf_in, single_point.tag)
-        print("fsurf_in :", single_point.fsurf_in)
-        print("fsurf_out :", single_point.fsurf_out)
+        print("fsurf_in: ", single_point.fsurf_in)
+        print("fsurf_out: ", single_point.fsurf_out)
         single_point.create_surfdata_at_point()

         # write to user_nl_clm if specified
         if args.create_user_mods:
             nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a")
-            line = 'fsurdat = ' + "'$CLM_USRDAT_DIR/" + single_point.fsurf_out + "'"
-            nl_clm.write('\n' + line + '\n')
+            line = "fsurdat = '${}'".format(os.path.join(USRDAT_DIR, single_point.fsurf_out))
+            single_point.write_to_file(line, nl_clm)
             nl_clm.close()

     # -- Create CTSM transient landuse data file
     if single_point.create_landuse:
         # -- Specify landuse file ---------------------------------
         single_point.fluse_in = os.path.join(dir_inputdata, dir_inputluse, fluse_in)
         single_point.fluse_out = single_point.create_fileout_name(fluse_in, single_point.tag)
-        print("fluse_in :", single_point.fluse_in)
-        print("fluse_out :", single_point.fluse_out)
+        print("fluse_in: ", single_point.fluse_in)
+        print("fluse_out: ", single_point.fluse_out)
         single_point.create_landuse_at_point()

         # write to user_nl_clm data if specified
-        if args.create_user_mods:
+        if single_point.create_user_mods:
             nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a")
-            line = 'fsurdat = ' + "'$CLM_USRDAT_DIR/" + single_point.fluse_out + "'"
-            nl_clm.write('\n' + line + '\n')
+            line = "flanduse_timeseries = '${}'".format(os.path.join(USRDAT_DIR, single_point.fluse_out))
+            single_point.write_to_file(line, nl_clm)
             nl_clm.close()

     # -- Create single point atmospheric forcing data
     if single_point.create_datm:
         # -- Specify datm and subset domain file ---------------------------------
         single_point.fdatmdomain_in = os.path.join(dir_input_datm, fdatmdomain_in)
-        single_point.fdatmdomain_out = os.path.join(dir_output_datm,
-                                                    single_point.add_tag_to_filename(single_point.fdatmdomain_in,
-                                                                                     single_point.tag))
-        print("fdatmdomain_in : ", single_point.fdatmdomain_in)
-        print("fdatmdomain out : ", single_point.fdatmdomain_out)
+        datm_file = single_point.add_tag_to_filename(single_point.fdatmdomain_in, single_point.tag)
+        single_point.fdatmdomain_out = os.path.join(dir_output_datm, datm_file)
+        print("fdatmdomain_in: ", single_point.fdatmdomain_in)
+        print("fdatmdomain out: ",
single_point.fdatmdomain_out) single_point.create_datmdomain_at_point() # -- Specify DATM directories, tags, and stream names @@ -668,11 +647,11 @@ def main(): single_point.name_solar = datm_solarname single_point.name_prec = datm_precname single_point.name_tpqw = datm_tpqwname - single_point.create_datm_at_point(args.create_user_mods, nl_datm) + single_point.create_datm_at_point() # -- Write shell commands - if args.create_user_mods: - shell_commands_file = open(os.path.join(args.user_mods_dir,"shell_commands"), 'w') + if single_point.create_user_mods: + shell_commands_file = open(os.path.join(args.user_mods_dir, "shell_commands"), "w") single_point.write_shell_commands(shell_commands_file) print("Successfully ran script for single point.") @@ -680,7 +659,7 @@ def main(): elif args.run_type == "reg": print("----------------------------------------------------------------------------") - print( "This script extracts a region from the global CTSM datasets.") + print("This script extracts a region from the global CTSM datasets.") # -- Create Region Object region = RegionalCase( @@ -693,53 +672,54 @@ def main(): args.create_surfdata, args.create_landuse, args.create_datm, + args.create_user_mods, + args.out_dir, ) region.create_tag() - # -- Set time stamp - command = 'date "+%y%m%d"' - x2 = subprocess.Popen(command, stdout=subprocess.PIPE, shell="True") - x = x2.communicate() - timetag = x[0].strip() - print(timetag) - - # -- Specify land domain file --------------------------------- - fdomain_out = os.path.join(args.out_dir, "domain.lnd.fv1.9x2.5_gx1v7." + region.tag + "_170518.nc") - - # SinglePointCase.set_fdomain (fdomain) - region.fdomain_in = fdomain_in - region.fdomain_out = fdomain_out - - # -- Specify surface data file -------------------------------- - fsurf_out = os.path.join( - args.out_dir, - "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" - + region.tag - + "_c170824.nc" - ) - region.fsurf_in = fsurf_in - region.fsurf_out = fsurf_out - - # -- Specify landuse file ------------------------------------- - fluse_out = os.path.join( - args.out_dir, - "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" - + region.tag - + ".c170824.nc" - ) - region.fluse_in = fluse_in - region.fluse_out = fluse_out - # -- Create CTSM domain file if region.create_domain: + # -- Specify land domain file --------------------------------- + region.fdomain_in = os.path.join(dir_inputdata, fdomain_in) + region.fdomain_out = os.path.join(args.out_dir, "domain.lnd.fv1.9x2.5_gx1v7." 
+ region.tag + "_170518.nc") + print("fdomain_in: ", region.fdomain_in) + print("fdomain_out: ", os.path.join(region.output_dir, region.fdomain_out)) region.create_domain_at_reg() # -- Create CTSM surface data file if region.create_surfdata: + # -- Specify surface file --------------------------------- + region.fsurf_in = os.path.join(dir_inputdata, dir_inputsurf, fsurf_in) + region.fsurf_out = os.path.join(args.out_dir, "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" + region.tag + + "_c170824.nc") + print("fsurf_in: ", region.fsurf_in) + print("fsurf_out: ", region.fsurf_out) region.create_surfdata_at_reg() + # write to user_nl_clm if specified + if args.create_user_mods: + nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a") + line = "fsurdat = '${}'".format(os.path.join(USRDAT_DIR, region.fsurf_out)) + region.write_to_file(line, nl_clm) + nl_clm.close() + # -- Create CTSM transient landuse data file if region.create_landuse: + # -- Specify surface file --------------------------------- + region.fluse_in = os.path.join(dir_inputdata, dir_inputluse, fluse_in) + region.fluse_out = os.path.join(args.out_dir, + "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" + + region.tag + ".c170824.nc") + print("fluse_in: ", region.fluse_in) + print("fluse_out: ", region.fluse_out) region.create_landuse_at_reg() + + # write to user_nl_clm data if specified + if region.create_user_mods: + nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a") + line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, region.fluse_out)) + region.write_to_file(line, nl_clm) + nl_clm.close() + print("Successfully ran script for a regional case.") exit() From 910a21a11f5a3113a94bdf4ef22ec784144cb41c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Fri, 10 Dec 2021 14:43:33 -0700 Subject: [PATCH 031/223] bunch of changes --- python/ctsm/git_utils.py | 40 +++++ python/ctsm/site_and_regional/base_case.py | 100 +++++++---- .../site_and_regional/single_point_case.py | 17 +- python/ctsm/subset_data.py | 170 +++++++----------- python/ctsm/utils.py | 34 +++- 5 files changed, 218 insertions(+), 143 deletions(-) create mode 100644 python/ctsm/git_utils.py diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py new file mode 100644 index 0000000000..e676f3ccaf --- /dev/null +++ b/python/ctsm/git_utils.py @@ -0,0 +1,40 @@ +"""General-purpose utility functions""" + +import logging +import subprocess + +logger = logging.getLogger(__name__) + +def get_git_short_hash(): + """ + Returns Git short SHA for the currect directory. + """ + try: + + # os.abspath(__file__) + sha = ( + subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) + .strip() + .decode() + ) + except subprocess.CalledProcessError: + sha = "NOT-A-GIT-REPOSITORY" + return sha + + +def get_git_long_hash(): + """ + Returns Git long SHA for the currect directory. + """ + try: + + # os.abspath(__file__) + sha = ( + subprocess.check_output(["git", "rev-parse", "HEAD"]) + .strip() + .decode() + ) + except subprocess.CalledProcessError: + sha = "NOT-A-GIT-REPOSITORY" + return sha + diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 5b8912f648..4b5fb09dce 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -1,13 +1,24 @@ +""" +This module includes the definition for a parent class for SinglePointCase +and RegionalCase. The common functionalities of SinglePointCase and +RegionalCase are defined in this Class. 
+""" +#-- Import libraries + +#-- standard libraries import os import logging -import numpy as np -import xarray as xr import subprocess from datetime import date from getpass import getuser -myname = getuser() +#-- 3rd party libraries +import numpy as np +import xarray as xr + +#-- import local classes for this script +from ctsm.git_utils import get_git_short_hash logger = logging.getLogger(__name__) @@ -36,15 +47,26 @@ class BaseCase: add_tag_to_filename(filename, tag) add a tag and timetag to a filename ending with [._]cYYMMDD.nc or [._]YYMMDD.nc + + update_metadata(nc) + Class method for adding some new attributes (such as date, username) and + remove the old attributes from the netcdf file. """ def __init__(self, create_domain, create_surfdata, create_landuse, create_datm): + """ + Initializes BaseCase with the given arguments. + + """ self.create_domain = create_domain self.create_surfdata = create_surfdata self.create_landuse = create_landuse self.create_datm = create_datm def __str__(self): + """ + Converts ingredients of the BaseCase to string for printing. + """ return ( str(self.__class__) + "\n" @@ -59,33 +81,52 @@ def __str__(self): @staticmethod def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): """ - lon_varname : variable name that has 2d lon - lat_varname : variable name that has 2d lat - x_dim: dimension name in X -- lon - y_dim: dimension name in Y -- lat + Create 1d coordinate variables for a netcdf file to enable sel() method + + Args + filename (str) : name of the netcdf file + lon_varname (str) : variable name that has 2d lon + lat_varname (str) : variable name that has 2d lat + x_dim (str) : dimension name in X -- lon + y_dim (str): dimension name in Y -- lat + + Returns: + f_out (xarray Dataset): Xarray Dataset with 1-d coords + """ logging.debug("Open file: " + filename) - f1 = xr.open_dataset(filename) + f_in = xr.open_dataset(filename) # create 1d coordinate variables to enable sel() method - lon0 = np.asarray(f1[lon_varname][0, :]) - lat0 = np.asarray(f1[lat_varname][:, 0]) + lon0 = np.asarray(f_in[lon_varname][0, :]) + lat0 = np.asarray(f_in[lat_varname][:, 0]) lon = xr.DataArray(lon0, name="lon", dims=x_dim, coords={x_dim: lon0}) lat = xr.DataArray(lat0, name="lat", dims=y_dim, coords={y_dim: lat0}) - f2 = f1.assign({"lon": lon, "lat": lat}) + f_out = f_in.assign({"lon": lon, "lat": lat}) - f2.reset_coords([lon_varname, lat_varname]) - f1.close() - return f2 + f_out.reset_coords([lon_varname, lat_varname]) + f_in.close() + return f_out @staticmethod def add_tag_to_filename(filename, tag): """ Add a tag and replace timetag of a filename - # Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc - # Add the tag to just before that ending part - # and change the ending part to the current time tag + Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc + Add the tag to just before that ending part + and change the ending part to the current time tag. + + Args + filename (str) : file name + tag (str) : string of a tag to be added to the end of filename + + Raises: + Error: When it cannot find . and _ in the filename. 
+ + Returns: + fname_out (str): filename with the tag and date string added + """ basename = os.path.basename(filename) cend = -10 @@ -96,18 +137,23 @@ def add_tag_to_filename(filename, tag): os.abort() today = date.today() today_string = today.strftime("%y%m%d") - return basename[:cend] + "_" + tag + "_c" + today_string + ".nc" + fname_out = basename[:cend] + "_" + tag + "_c" + today_string + ".nc" + return fname_out def update_metadata(self, nc): + """ + Class method for adding some new attributes (such as date, username) and + remove the old attributes from the netcdf file. + """ # update attributes today = date.today() today_string = today.strftime("%Y-%m-%d") # get git hash - sha = self.get_git_sha() + sha = get_git_short_hash() nc.attrs["Created_on"] = today_string - nc.attrs["Created_by"] = myname + nc.attrs["Created_by"] = getuser() nc.attrs["Created_with"] = os.path.abspath(__file__) + " -- " + sha # delete unrelated attributes if they exist @@ -131,17 +177,3 @@ def update_metadata(self, nc): # for attr, value in attr_list.items(): # print (attr + " = "+str(value)) - @staticmethod - def get_git_sha(): - """ - Returns Git short SHA for the currect directory. - """ - try: - sha = ( - subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) - .strip() - .decode() - ) - except subprocess.CalledProcessError: - sha = "NOT-A-GIT-REPOSITORY" - return sha diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 17e103a120..4fbdb9a7df 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -1,9 +1,19 @@ -from ctsm.site_and_regional.base_case import BaseCase +""" +This module includes the definition for singlepoint class. +""" + +#-- Import libraries import os import logging + +from datetime import date + +#-- 3rd party libraries import numpy as np import xarray as xr -from datetime import date + +#-- import local classes for this script +from ctsm.site_and_regional.base_case import BaseCase logger = logging.getLogger(__name__) @@ -30,10 +40,13 @@ class SinglePointCase(BaseCase): create_domain_at_point Create domain file at a single point. + create_landuse_at_point: Create landuse file at a single point. + create_surfdata_at_point: Create surface dataset at a single point. + create_datmdomain_at_point: Create DATM domain file at a single point. """ diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index bf8e0c1b36..427bc51ae2 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -11,11 +11,8 @@ ncar_pylib ------------------------------------------------------------------- -To see the available options for single point cases: - ./subset_data.py point --help - -To see the available options for regional cases: - ./subset_data.py reg --help +To see the available options for single point or regional cases: + ./subset_data.py --help ------------------------------------------------------------------- This script extracts domain files, surface dataset, and DATM files @@ -60,56 +57,58 @@ files, the appropriate flags should be used. 
------------------------------------------------------------------- To run the script for a single point: - ./subset_data.py point --help + ./subset_data.py point To run the script for a region: - ./subset_data.py reg --help + ./subset_data.py reg To remove NPL from your environment on Cheyenne/Casper: deactivate ------------------------------------------------------------------- """ -# TODO -# Automatic downloading of missing files if they are missing +# TODO [NS]: +# -[] Automatic downloading of missing files if they are missing # default 78 pft vs 16 pft -# Import libraries -from __future__ import print_function +#-- Import libraries -import sys +#-- standard libraries import os +import sys import string import logging -import subprocess import argparse - -import numpy as np -import xarray as xr import textwrap +import subprocess from datetime import date from getpass import getuser -from logging.handlers import RotatingFileHandler from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter +#-- 3rd party libraries +import numpy as np +import xarray as xr +#-- import local classes for this script from ctsm.site_and_regional.base_case import BaseCase from ctsm.site_and_regional.single_point_case import SinglePointCase from ctsm.site_and_regional.regional_case import RegionalCase -from ctsm.ctsm_logging import ( - setup_logging_pre_config, - add_logging_args, - process_logging_args, -) +from ctsm.utils import str2bool -logger = logging.getLogger(__name__) +#-- import ctsm logging flags +from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args +logger = logging.getLogger(__name__) def get_parser(): """ Get the parser object for subset_data.py script. + + Returns: + parser (ArgumentParser): ArgumentParser which includes all the parser information. + """ parser = ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter @@ -314,7 +313,7 @@ def get_parser(): nargs="?", const=True, required=False, - default=False, + default=True, ) subparser.add_argument( "--dompft", @@ -349,34 +348,6 @@ def get_parser(): return parser -def str2bool(v): - """ - Function for converting different forms of - command line boolean strings to boolean value. - - Args: - v (str): String bool input - - Raises: - if the argument is not an acceptable boolean string - (such as yes or no ; true or false ; y or n ; t or f ; 0 or 1). - argparse.ArgumentTypeError: The string should be one of the mentioned values. - - Returns: - bool: Boolean value corresponding to the input. - """ - if isinstance(v, bool): - return v - if v.lower() in ("yes", "true", "t", "y", "1"): - return True - elif v.lower() in ("no", "false", "f", "n", "0"): - return False - else: - raise argparse.ArgumentTypeError( - "Boolean value expected. [true or false] or [y or n]" - ) - - def plat_type(x): """ Function to define lat type for the parser @@ -428,23 +399,6 @@ def plon_type(x): return x -def get_git_sha(): - """ - Returns Git short SHA for the currect directory. 
- """ - try: - - # os.abspath(__file__) - sha = ( - subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) - .strip() - .decode() - ) - except subprocess.CalledProcessError: - sha = "NOT-A-GIT-REPOSITORY" - return sha - - def main(): # -- add logging flags from ctsm_logging @@ -456,21 +410,12 @@ def main(): process_logging_args(args) - # --------------------------------- # - today = date.today() today_string = today.strftime("%Y%m%d") myname = getuser() - pwd = os.getcwd() - # log_file = os.path.join(pwd, today_string + ".log") - - # log_level = logging.DEBUG - # setup_logging(log_file, log_level) - # log = logging.getLogger(__name__) - logging.info("User = " + myname) logging.info("Current directory = " + pwd) @@ -490,10 +435,13 @@ def main(): # -- Create regional CLM domain file create_domain = args.create_domain + # -- Create CLM surface data file create_surfdata = args.create_surfdata + # -- Create CLM surface data file create_landuse = args.create_landuse + # -- Create single point DATM atmospheric forcing data create_datm = args.create_datm datm_syr = args.datm_syr @@ -525,10 +473,10 @@ def main(): uniform_snowpack, saturation_excess, ) + single_point.create_tag() logging.debug(single_point) - # output_to_logger (single_point) if crop_flag: num_pft = "78" @@ -558,16 +506,16 @@ def main(): logging.info("dir_output_datm : " + dir_output_datm) # # -- Set time stamp - today = date.today() - timetag = today.strftime("%y%m%d") + #today = date.today() + #timetag = today.strftime("%y%m%d") # -- Specify land domain file --------------------------------- fdomain_in = os.path.join( dir_inputdata, "share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc" ) - fdomain_out = dir_output + single_point.add_tag_to_filename( + fdomain_out = os.path.join(dir_output , single_point.add_tag_to_filename( fdomain_in, single_point.tag - ) + )) single_point.fdomain_in = fdomain_in single_point.fdomain_out = fdomain_out logging.info("fdomain_in : " + fdomain_in) # @@ -586,9 +534,9 @@ def main(): ) # fsurf_out = dir_output + single_point.add_tag_to_filename(fsurf_in, single_point.tag) # remove res from filename for singlept - fsurf_out = dir_output + single_point.create_fileout_name( + fsurf_out = os.path.join( dir_output , single_point.create_fileout_name( fsurf_in, single_point.tag - ) + )) single_point.fsurf_in = fsurf_in single_point.fsurf_out = fsurf_out @@ -653,7 +601,13 @@ def main(): exit() elif args.run_type == "reg": - logging.info("Running the script for the region") + logging.info( + "----------------------------------------------------------------------------" + ) + logging.info( + "This script extracts a single point from the global CTSM inputdata datasets." 
+ ) + # -- Specify region to extract lat1 = args.lat1 lat2 = args.lat2 @@ -663,10 +617,13 @@ def main(): # -- Create regional CLM domain file create_domain = args.create_domain + # -- Create CLM surface data file create_surfdata = args.create_surfdata + # -- Create CLM surface data file create_landuse = args.create_landuse + # -- Create DATM atmospheric forcing data create_datm = args.create_datm @@ -686,6 +643,8 @@ def main(): create_datm, ) + region.create_tag() + logging.debug(region) if crop_flag: @@ -695,8 +654,6 @@ def main(): logging.debug("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) - region.create_tag() - # -- Set input and output filenames # -- Specify input and output directories dir_output = "/glade/scratch/" + myname + "/region/" @@ -707,50 +664,53 @@ def main(): dir_clm_forcedata = "/glade/p/cgd/tss/CTSM_datm_forcing_data/" # -- Set time stamp - command = 'date "+%y%m%d"' - x2 = subprocess.Popen(command, stdout=subprocess.PIPE, shell="True") - x = x2.communicate() - timetag = x[0].strip() - logging.info(timetag) + #today = date.today() + #timetag = today.strftime("%y%m%d") # -- Specify land domain file --------------------------------- - fdomain_in = ( - dir_inputdata + "share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc" + fdomain_in = os.path.join( + dir_inputdata , "share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc" ) - fdomain_out = ( - dir_output + "domain.lnd.fv1.9x2.5_gx1v7." + region.tag + "_170518.nc" + fdomain_out = os.path.join( + dir_output , "domain.lnd.fv1.9x2.5_gx1v7." + region.tag + "_170518.nc" ) # SinglePointCase.set_fdomain (fdomain) region.fdomain_in = fdomain_in region.fdomain_out = fdomain_out + logging.info("fdomain_in : " + fdomain_in) + logging.info("fdomain_out : " + fdomain_out) # -- Specify surface data file -------------------------------- - fsurf_in = ( + fsurf_in = os.path.join( dir_inputdata - + "lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc" + , "lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc" ) - fsurf_out = ( + fsurf_out = os.path.join( dir_output - + "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" + , "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" + region.tag + "_c170824.nc" ) region.fsurf_in = fsurf_in region.fsurf_out = fsurf_out + logging.info("fsurf_in : " + fdomain_in) + logging.info("fsurf_out : " + fdomain_out) # -- Specify landuse file ------------------------------------- - fluse_in = ( + fluse_in = os.path.join( dir_inputdata - + "lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc" + , "lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc" ) - fluse_out = ( + fluse_out = os.path.join( dir_output - + "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" + , "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" + region.tag + ".c170824.nc" ) region.fluse_in = fluse_in region.fluse_out = fluse_out + logging.info("fluse_in : " + fdomain_in) + logging.info("fluse_out : " + fdomain_out) # -- Create CTSM domain file if create_domain: diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 44cce0cccf..8f0fe78400 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -7,6 +7,7 @@ logger = logging.getLogger(__name__) + def abort(errmsg): """Abort the program with the given error message @@ -15,7 +16,8 @@ def abort(errmsg): if logger.isEnabledFor(logging.DEBUG): pdb.set_trace() - sys.exit('ERROR: {}'.format(errmsg)) + sys.exit("ERROR: {}".format(errmsg)) + def 
fill_template_file(path_to_template, path_to_final, substitutions):
     """Given a template file (based on python's template strings), write a copy of the
 
@@ -31,5 +33,33 @@ def fill_template_file(path_to_template, path_to_final, substitutions):
         template_file_contents = template_file.read()
     template = string.Template(template_file_contents)
     final_file_contents = template.substitute(substitutions)
-    with open(path_to_final, 'w') as final_file:
+    with open(path_to_final, "w") as final_file:
         final_file.write(final_file_contents)
+
+
+def str2bool(v):
+    """
+    Function for converting different forms of
+    command-line boolean strings to a boolean value.
+
+    Args:
+        v (str): String bool input
+
+    Raises:
+        argparse.ArgumentTypeError: If the argument is not an acceptable
+        boolean string (such as yes or no ; true or false ; y or n ;
+        t or f ; 0 or 1).
+
+    Returns:
+        bool: Boolean value corresponding to the input.
+    """
+    if isinstance(v, bool):
+        return v
+    if v.lower() in ("yes", "true", "t", "y", "1"):
+        return True
+    elif v.lower() in ("no", "false", "f", "n", "0"):
+        return False
+    else:
+        raise argparse.ArgumentTypeError(
+            "Boolean value expected. [true or false] or [y or n]"
+        )
From de7ef435a09e1f9e707963ccf9300b04308b660a Mon Sep 17 00:00:00 2001
From: Adrianna Foster
Date: Mon, 13 Dec 2021 12:24:20 -0700
Subject: [PATCH 032/223] fixing git sha

---
 python/ctsm/site_and_regional/base_case.py         | 2 +-
 python/ctsm/site_and_regional/single_point_case.py | 2 +-
 python/ctsm/subset_data.py                         | 5 +++--
 tools/site_and_regional/default_data.cfg           | 6 +++---
 4 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py
index ffd09db37e..12197b2e12 100644
--- a/python/ctsm/site_and_regional/base_case.py
+++ b/python/ctsm/site_and_regional/base_case.py
@@ -132,7 +132,7 @@ def get_git_sha():
         Returns Git short SHA for the current directory. 
""" try: - sha = (subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).strip().decode()) + sha = (subprocess.check_output(["git", "-C", os.path.dirname(__file__), "rev-parse", "--short", "HEAD"]).strip().decode()) except subprocess.CalledProcessError: sha = "NOT-A-GIT-REPOSITORY" return sha diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index cbb7f5c4c9..f574984952 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -363,7 +363,7 @@ def create_datm_at_point(self): fsolar = os.path.join(self.dir_input_datm, self.dir_solar, "{}{}.nc".format(self.tag_solar, dtag)) fsolar2 = "{}{}.{}.nc".format(self.tag_solar, self.tag, dtag) - fprecip = os.path.join(self.dir_input_datm, self.dir_solar, "{}{}.nc".format(self.tag_prec, dtag)) + fprecip = os.path.join(self.dir_input_datm, self.dir_prec, "{}{}.nc".format(self.tag_prec, dtag)) fprecip2 = "{}{}.{}.nc".format(self.tag_prec, self.tag, dtag) ftpqw = os.path.join(self.dir_input_datm, self.dir_tpqw, "{}{}.nc".format(self.tag_tpqw, dtag)) ftpqw2 = "{}{}.{}.nc".format(self.tag_tpqw, self.tag, dtag) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 3ad0e272c2..f6b5a744cf 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -427,8 +427,9 @@ def main(): process_logging_args(args) # parse defaults file + cesmroot = path_to_ctsm_root() defaults = configparser.ConfigParser() - defaults.read(DEFAULTS_FILE) + defaults.read(os.path.join(cesmroot, 'tools/site_and_regional', DEFAULTS_FILE)) # --------------------------------- # @@ -490,7 +491,7 @@ def main(): if not os.path.isdir(args.user_mods_dir): os.mkdir(args.user_mods_dir) - cesmroot = path_to_ctsm_root() + # -- Create empty user_nl_clm file if args.create_surfdata or args.create_landuse: diff --git a/tools/site_and_regional/default_data.cfg b/tools/site_and_regional/default_data.cfg index 0fde22b129..f689c99044 100644 --- a/tools/site_and_regional/default_data.cfg +++ b/tools/site_and_regional/default_data.cfg @@ -1,5 +1,5 @@ [main] -clmforcingindir = /Users/afoster/Documents/ctsm +clmforcingindir = /glade/p/cesmdata/inputdata [datm_gswp3] dir = /glade/p/cgd/tss/CTSM_datm_forcing_data/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 @@ -15,7 +15,7 @@ precname = CLMGSWP3v1.Precip tpqwname = CLMGSWP3v1.TPQW [surfdat] -dir = surfdata +dir = lnd/clm2/surfdata_map/release-clm5.0.18 surfdat_16pft = surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc surfdat_78pft = surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc @@ -25,4 +25,4 @@ landuse_16pft = landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-20 landuse_78pft = landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc [domain] -file = domain.lnd.fv0.9x1.25_gx1v7.151020.nc +file = share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc From dd74f20e4b76d26072e5bdbade486942a8d26239 Mon Sep 17 00:00:00 2001 From: Bowen Fang Date: Tue, 14 Dec 2021 05:55:04 +0800 Subject: [PATCH 033/223] Implement "hasurban": only initialize urban landunits where they will grow. 
See https://github.com/ESCOMP/CTSM/issues/1572 --- src/main/clm_initializeMod.F90 | 7 ++-- src/main/clm_varsur.F90 | 4 +++ src/main/subgridMod.F90 | 65 +++++++++++++++++++++++++++++++--- src/main/subgridWeightsMod.F90 | 14 ++++++-- 4 files changed, 79 insertions(+), 11 deletions(-) diff --git a/src/main/clm_initializeMod.F90 b/src/main/clm_initializeMod.F90 index 2d1890ea1e..720a3aa6a8 100644 --- a/src/main/clm_initializeMod.F90 +++ b/src/main/clm_initializeMod.F90 @@ -16,7 +16,7 @@ module clm_initializeMod use clm_varctl , only : use_lch4, use_cn, use_cndv, use_c13, use_c14, use_fates use clm_varctl , only : use_soil_moisture_streams use clm_instur , only : wt_lunit, urban_valid, wt_nat_patch, wt_cft, fert_cft - use clm_instur , only : irrig_method, wt_glc_mec, topo_glc_mec, haslake + use clm_instur , only : irrig_method, wt_glc_mec, topo_glc_mec, haslake, hasurban use perf_mod , only : t_startf, t_stopf use readParamsMod , only : readParameters use ncdio_pio , only : file_desc_t @@ -121,7 +121,7 @@ subroutine initialize2(ni,nj) use clm_varctl , only : use_cn, use_fates use clm_varctl , only : use_crop, ndep_from_cpl, fates_spitfire_mode use clm_varorb , only : eccen, mvelpp, lambm0, obliqr - use landunit_varcon , only : landunit_varcon_init, max_lunit + use landunit_varcon , only : landunit_varcon_init, max_lunit, numurbl use pftconMod , only : pftcon use decompInitMod , only : decompInit_clumps, decompInit_glcp use domainMod , only : domain_check, ldomain, domain_init @@ -214,6 +214,7 @@ subroutine initialize2(ni,nj) allocate (wt_glc_mec (begg:endg, maxpatch_glc )) allocate (topo_glc_mec (begg:endg, maxpatch_glc )) allocate (haslake (begg:endg )) + allocate (hasurban (begg:endg, numurbl )) ! Read list of Patches and their corresponding parameter values ! Independent of model resolution, Needs to stay before surfrd_get_data @@ -288,7 +289,7 @@ subroutine initialize2(ni,nj) ! Deallocate surface grid dynamic memory for variables that aren't needed elsewhere. ! Some things are kept until the end of initialize2; urban_valid is kept through the ! end of the run for error checking. - deallocate (wt_lunit, wt_cft, wt_glc_mec, haslake) + deallocate (wt_lunit, wt_cft, wt_glc_mec, haslake, hasurban) ! Determine processor bounds and clumps for this processor call get_proc_bounds(bounds_proc) diff --git a/src/main/clm_varsur.F90 b/src/main/clm_varsur.F90 index 41740f1e2b..e91c6a5880 100644 --- a/src/main/clm_varsur.F90 +++ b/src/main/clm_varsur.F90 @@ -48,6 +48,10 @@ module clm_instur ! whether we have lake to initialise in each grid cell logical , pointer :: haslake(:) + + ! whether we have urban to initialize in each grid cell + ! (second dimension goes 0:numurbl-1) + logical , pointer :: hasurban(:,:) !----------------------------------------------------------------------- end module clm_instur diff --git a/src/main/subgridMod.F90 b/src/main/subgridMod.F90 index 42a3bb0fb9..d0eb285d65 100644 --- a/src/main/subgridMod.F90 +++ b/src/main/subgridMod.F90 @@ -39,6 +39,7 @@ module subgridMod public :: subgrid_get_info_crop public :: crop_patch_exists ! returns true if the given crop patch should be created in memory public :: lake_landunit_exists ! returns true if the lake landunit should be created in memory + public :: urban_landunit_exists ! returns true if the urban landunit should be created in memory ! !PRIVATE MEMBER FUNCTIONS: private :: subgrid_get_info_urban @@ -348,6 +349,10 @@ subroutine subgrid_get_info_urban(gi, ltype, npatches, ncols, nlunits) ! ! 
In either case, for simplicity, we always allocate space for all columns on any
     !    allocated urban landunits.
+
+    ! For dynamic urban: to improve efficiency, 'hasurban' is added in landuse.timeseries
+    ! that tells whether any urban landunit ever grows in a given grid cell in a transient
+    ! run. The urban landunit is allocated only if hasurban is true. (#1572)
 
     if (run_zero_weight_urban) then
        if (urban_valid(gi)) then
@@ -355,12 +360,18 @@ subroutine subgrid_get_info_urban(gi, ltype, npatches, ncols, nlunits)
        else
           this_landunit_exists = .false.
        end if
+!    else
+!       if (wt_lunit(gi, ltype) > 0.0_r8) then
+!          this_landunit_exists = .true.
+!       else
+!          this_landunit_exists = .false.
+!       end if
     else
-       if (wt_lunit(gi, ltype) > 0.0_r8) then
-          this_landunit_exists = .true.
-       else
-          this_landunit_exists = .false.
-       end if
+       if (urban_landunit_exists(gi, ltype)) then
+          this_landunit_exists = .true.
+       else
+          this_landunit_exists = .false.
+       end if
     end if
 
     if (this_landunit_exists) then
@@ -599,4 +610,48 @@ function lake_landunit_exists(gi) result(exists)
 
   end function lake_landunit_exists
 
+!-----------------------------------------------------------------------
+  function urban_landunit_exists(gi, ltype) result(exists)
+    !
+    ! !DESCRIPTION:
+    ! Returns true if a landunit for urban should be created in memory,
+    ! i.e., for gridcells where urban will ever grow, as given by hasurban
+    !
+    ! !USES:
+    use dynSubgridControlMod , only : get_do_transient_urban
+    use clm_instur , only : hasurban
+    use clm_varcon , only : isturb_MIN
+    !
+    ! !ARGUMENTS:
+    logical :: exists ! function result
+    integer, intent(in) :: gi ! grid cell index
+    integer, intent(in) :: ltype ! landunit type (isturb_tbd, etc.)
+    !
+    ! !LOCAL VARIABLES:
+
+    character(len=*), parameter :: subname = 'urban_landunit_exists'
+    !-----------------------------------------------------------------------
+
+    if (get_do_transient_urban()) then
+       ! To support dynamic landunits, we initialize an urban land unit in each grid cell
+       ! in which urban will ever be present. This is defined by the hasurban variable.
+
+       if (hasurban(gi,ltype-isturb_MIN)) then
+          exists = .true.
+       else
+          exists = .false.
+       end if
+
+    else
+       ! For a run without transient urban, only allocate memory for urban land units
+       ! actually present in the run.
+       if (wt_lunit(gi, ltype) > 0.0_r8) then
+          exists = .true.
+       else
+          exists = .false.
+       end if
+    end if
+
+  end function urban_landunit_exists
+
 end module subgridMod
diff --git a/src/main/subgridWeightsMod.F90 b/src/main/subgridWeightsMod.F90
index 224155914c..596d8880f5 100644
--- a/src/main/subgridWeightsMod.F90
+++ b/src/main/subgridWeightsMod.F90
@@ -334,9 +334,17 @@ logical function is_active_l(l, glc_behavior)
        is_active_l = .true.
     end if
 
-    if ((lun%itype(l) >= isturb_MIN .and. lun%itype(l) <= isturb_MAX) .and. &
-         run_zero_weight_urban) then
-       is_active_l = .true.
+!    if ((lun%itype(l) >= isturb_MIN .and. lun%itype(l) <= isturb_MAX) .and. &
+!         run_zero_weight_urban) then
+!       is_active_l = .true.
+!    end if
+
+    ! Set urban land units to active, as long as memory is allocated for such land units.
+    ! By doing this, urban land units are also run virtually in grid cells which will grow
+    ! urban during the transient run.
+
+    if (lun%itype(l) >= isturb_MIN .and. lun%itype(l) <= isturb_MAX) then
+       is_active_l = .true.
     end if
 
     ! In general, include a virtual natural vegetation landunit. This aids
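
The core of the "hasurban" patch above is a single allocation rule: in a
transient-urban run, memory for an urban landunit is allocated wherever urban
will ever be present (hasurban), independent of its current weight; otherwise
it is allocated only where the landunit has nonzero weight today. A minimal
Python sketch of that rule, for illustration only -- the authoritative
implementation is the Fortran function urban_landunit_exists in subgridMod.F90
above, and the argument names below are hypothetical stand-ins for its inputs:

    def urban_landunit_exists(transient_urban, hasurban, wt_lunit):
        """Return True if memory should be allocated for an urban landunit.

        transient_urban -- True for a transient-urban run
                           (result of get_do_transient_urban)
        hasurban        -- True if urban ever grows in this grid cell,
                           read from the landuse.timeseries file
        wt_lunit        -- current weight of this urban landunit type
        """
        if transient_urban:
            # Transient run: allocate wherever urban will ever appear, so the
            # landunit exists (and can run virtually) before it starts to grow.
            return hasurban
        # Non-transient run: allocate only where urban is present right now.
        return wt_lunit > 0.0

    # e.g. a zero-weight urban landunit in a transient run is still allocated:
    # urban_landunit_exists(True, hasurban=True, wt_lunit=0.0) -> True

Paired with the subgridWeightsMod change, such zero-weight urban landunits are
also marked active, so they are spun up virtually until they acquire area.
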
This aids From b96352f65abb3c54997acdddb082214f23f78409 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 14 Dec 2021 06:05:39 -0700 Subject: [PATCH 034/223] more minor changes --- python/ctsm/site_and_regional/base_case.py | 29 +++++++++ .../site_and_regional/single_point_case.py | 26 -------- python/ctsm/subset_data.py | 61 +++++++++---------- 3 files changed, 57 insertions(+), 59 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 4b5fb09dce..0ed93eea6b 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -177,3 +177,32 @@ def update_metadata(self, nc): # for attr, value in attr_list.items(): # print (attr + " = "+str(value)) + @staticmethod + def create_fileout_name(filename, tag): + + basename = os.path.basename(filename) + items = basename.split("_") + today = date.today() + today_string = today.strftime("%y%m%d") + print (items) + print (len(items)) + new_string = ( + items[0] + + "_" + + items[2] + + "_" + + items[3] + + "_" + + items[4] + + "_" + + items[5] + + "_" + + items[6] + + "_" + + tag + + "_c" + + today_string + + ".nc" + ) + return new_string + diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 4fbdb9a7df..5061e32dfa 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -82,32 +82,6 @@ def create_tag(self): else: self.tag = str(self.plon) + "_" + str(self.plat) - @staticmethod - def create_fileout_name(filename, tag): - - basename = os.path.basename(filename) - items = basename.split("_") - today = date.today() - today_string = today.strftime("%y%m%d") - new_string = ( - items[0] - + "_" - + items[2] - + "_" - + items[3] - + "_" - + items[4] - + "_" - + items[5] - + "_" - + items[6] - + "_" - + tag - + "_c" - + today_string - + ".nc" - ) - return new_string def create_domain_at_point(self): logging.info("----------------------------------------------------------------------") diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 427bc51ae2..b9f562d793 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -60,7 +60,7 @@ ./subset_data.py point To run the script for a region: - ./subset_data.py reg + ./subset_data.py region To remove NPL from your environment on Cheyenne/Casper: deactivate @@ -119,7 +119,7 @@ def get_parser(): help="Two possible ways to run this sript, either:", dest="run_type" ) pt_parser = subparsers.add_parser("point", help="Run script for a single point.") - rg_parser = subparsers.add_parser("reg", help="Run script for a region.") + rg_parser = subparsers.add_parser("region", help="Run script for a region.") # -- signle point parser options pt_parser.add_argument( @@ -419,8 +419,6 @@ def main(): logging.info("User = " + myname) logging.info("Current directory = " + pwd) - # --------------------------------- # - if args.run_type == "point": logging.info( "----------------------------------------------------------------------------" @@ -488,6 +486,7 @@ def main(): # -- Set input and output filenames # -- Specify input and output directories dir_output = args.out_dir + # -- create output dir if it does not exist if not os.path.isdir(dir_output): os.mkdir(dir_output) @@ -498,17 +497,12 @@ def main(): ) dir_output_datm = os.path.join(dir_output, "datmdata/") - # -- create output dir if it does not exist if not os.path.isdir(dir_output_datm): 
os.mkdir(dir_output_datm) logging.info("dir_input_datm : " + dir_input_datm) # logging.info("dir_output_datm : " + dir_output_datm) # - # -- Set time stamp - #today = date.today() - #timetag = today.strftime("%y%m%d") - # -- Specify land domain file --------------------------------- fdomain_in = os.path.join( dir_inputdata, "share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc" @@ -516,8 +510,10 @@ def main(): fdomain_out = os.path.join(dir_output , single_point.add_tag_to_filename( fdomain_in, single_point.tag )) + single_point.fdomain_in = fdomain_in single_point.fdomain_out = fdomain_out + logging.info("fdomain_in : " + fdomain_in) # logging.info("fdomain_out : " + fdomain_out) # @@ -533,13 +529,15 @@ def main(): "lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc", ) - # fsurf_out = dir_output + single_point.add_tag_to_filename(fsurf_in, single_point.tag) # remove res from filename for singlept - fsurf_out = os.path.join( dir_output , single_point.create_fileout_name( + fsurf_out = os.path.join(dir_output , single_point.add_tag_to_filename( fsurf_in, single_point.tag )) + single_point.fsurf_in = fsurf_in single_point.fsurf_out = fsurf_out + print("fsurf_in : " + fsurf_in) # + print("fsurf_out : " + fsurf_out) # logging.info("fsurf_in : " + fsurf_in) # logging.info("fsurf_out : " + fsurf_out) # @@ -555,11 +553,13 @@ def main(): "lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc", ) # fluse_out = dir_output + single_point.add_tag_to_filename( fluse_in, single_point.tag ) # remove resolution from filename for singlept cases - fluse_out = dir_output + single_point.create_fileout_name( + fluse_out = os.path.join( dir_output , single_point.add_tag_to_filename( fluse_in, single_point.tag - ) + )) single_point.fluse_in = fluse_in single_point.fluse_out = fluse_out + print("fluse_in : " + fluse_in) # + print("fluse_out : " + fluse_out) # logging.info("fluse_in : " + fluse_in) # logging.info("fluse_out : " + fluse_out) # @@ -568,9 +568,9 @@ def main(): dir_clm_forcedata, "atm_forcing.datm7.GSWP3.0.5d.v1.c170516/domain.lnd.360x720_gswp3.0v1.c170606.nc", ) - fdatmdomain_out = dir_output_datm + single_point.add_tag_to_filename( + fdatmdomain_out = os.path.join(dir_output_datm , single_point.add_tag_to_filename( fdatmdomain_in, single_point.tag - ) + )) single_point.fdatmdomain_in = fdatmdomain_in single_point.fdatmdomain_out = fdatmdomain_out logging.info("fdatmdomain_in : " + fdatmdomain_in) # @@ -656,17 +656,14 @@ def main(): # -- Set input and output filenames # -- Specify input and output directories - dir_output = "/glade/scratch/" + myname + "/region/" + dir_output = args.out_dir + # -- create output dir if it does not exist if not os.path.isdir(dir_output): os.mkdir(dir_output) dir_inputdata = "/glade/p/cesmdata/cseg/inputdata/" dir_clm_forcedata = "/glade/p/cgd/tss/CTSM_datm_forcing_data/" - # -- Set time stamp - #today = date.today() - #timetag = today.strftime("%y%m%d") - # -- Specify land domain file --------------------------------- fdomain_in = os.path.join( dir_inputdata , "share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc" @@ -674,7 +671,10 @@ def main(): fdomain_out = os.path.join( dir_output , "domain.lnd.fv1.9x2.5_gx1v7." 
+ region.tag + "_170518.nc" ) - # SinglePointCase.set_fdomain (fdomain) + fdomain_out = os.path.join(dir_output , region.add_tag_to_filename( + fdomain_in, region.tag + )) + region.fdomain_in = fdomain_in region.fdomain_out = fdomain_out logging.info("fdomain_in : " + fdomain_in) @@ -685,12 +685,10 @@ def main(): dir_inputdata , "lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc" ) - fsurf_out = os.path.join( - dir_output - , "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" - + region.tag - + "_c170824.nc" - ) + fsurf_out = os.path.join(dir_output , region.add_tag_to_filename( + fsurf_in, region.tag + )) + region.fsurf_in = fsurf_in region.fsurf_out = fsurf_out logging.info("fsurf_in : " + fdomain_in) @@ -701,12 +699,9 @@ def main(): dir_inputdata , "lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc" ) - fluse_out = os.path.join( - dir_output - , "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" - + region.tag - + ".c170824.nc" - ) + fluse_out = os.path.join(dir_output , region.add_tag_to_filename( + fluse_in, region.tag + )) region.fluse_in = fluse_in region.fluse_out = fluse_out logging.info("fluse_in : " + fdomain_in) From 455197a02393aedee1ec86178a6bbc311e1c8acc Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 14 Dec 2021 08:20:09 -0700 Subject: [PATCH 035/223] some documentation changes. --- python/ctsm/site_and_regional/base_case.py | 72 ++++--- .../ctsm/site_and_regional/regional_case.py | 81 ++++--- .../site_and_regional/single_point_case.py | 200 ++++++++++++------ python/ctsm/subset_data.py | 21 +- 4 files changed, 240 insertions(+), 134 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 0ed93eea6b..283da366a9 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -57,6 +57,16 @@ def __init__(self, create_domain, create_surfdata, create_landuse, create_datm): """ Initializes BaseCase with the given arguments. + Parameters + ---------- + create_domain : bool + Flag for creating domain file a region/single point + create_surfdata : bool + Flag for creating domain file a region/single point + create_landuse : bool + Flag for creating landuse file a region/single point + create_datmdata : bool + Flag for creating datm files a region/single point """ self.create_domain = create_domain self.create_surfdata = create_surfdata @@ -83,14 +93,20 @@ def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): """ Create 1d coordinate variables for a netcdf file to enable sel() method - Args + Parameters + ---------- filename (str) : name of the netcdf file lon_varname (str) : variable name that has 2d lon lat_varname (str) : variable name that has 2d lat x_dim (str) : dimension name in X -- lon y_dim (str): dimension name in Y -- lat - Returns: + Raises + ------ + None + + Returns + ------- f_out (xarray Dataset): Xarray Dataset with 1-d coords """ @@ -117,14 +133,17 @@ def add_tag_to_filename(filename, tag): Add the tag to just before that ending part and change the ending part to the current time tag. - Args + Parameters + ---------- filename (str) : file name tag (str) : string of a tag to be added to the end of filename - Raises: + Raises + ------ Error: When it cannot find . and _ in the filename. 
- Returns: + Returns + ------ fname_out (str): filename with the tag and date string added """ @@ -144,6 +163,20 @@ def update_metadata(self, nc): """ Class method for adding some new attributes (such as date, username) and remove the old attributes from the netcdf file. + + Parameters + ---------- + nc (xarray dataset) : + Xarray dataset of netcdf file that we'd want to update it's metadata. + + Raises + ------ + None + + Returns + ------ + None + """ # update attributes today = date.today() @@ -177,32 +210,3 @@ def update_metadata(self, nc): # for attr, value in attr_list.items(): # print (attr + " = "+str(value)) - @staticmethod - def create_fileout_name(filename, tag): - - basename = os.path.basename(filename) - items = basename.split("_") - today = date.today() - today_string = today.strftime("%y%m%d") - print (items) - print (len(items)) - new_string = ( - items[0] - + "_" - + items[2] - + "_" - + items[3] - + "_" - + items[4] - + "_" - + items[5] - + "_" - + items[6] - + "_" - + tag - + "_c" - + today_string - + ".nc" - ) - return new_string - diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 18893aa5d5..ef6a978a6e 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -1,14 +1,35 @@ -from ctsm.site_and_regional.base_case import BaseCase +""" +This module includes the definition for a RegionalCase classs. +""" +#-- Import libraries +#-- Import Python Standard Libraries +import logging +#-- 3rd party libraries import numpy as np import xarray as xr -import logging + +#-- import local classes for this script +from ctsm.site_and_regional.base_case import BaseCase logger = logging.getLogger(__name__) class RegionalCase(BaseCase): """ A case to encapsulate regional cases. + + ... 
+ Attributes + ---------- + plat : float + latitude + plon : float + longitude + site_name: str -- default = None + Site name + + + """ def __init__( @@ -48,68 +69,70 @@ def create_domain_at_reg(self): #logging.debug ("Creating domain file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) logging.info("Creating domain file at region:"+ self.tag) # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") - lat = f2["lat"] - lon = f2["lon"] + f_in = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") + lat = f_in["lat"] + lon = f_in["lon"] # subset longitude and latitude arrays xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3 = f2.isel(nj=yind, ni=xind) + f_out = f_in.isel(nj=yind, ni=xind) # update attributes - self.update_metadata(f3) - f3.attrs["Created_from"] = self.fdomain_in + self.update_metadata(f_out) + f_out.attrs["Created_from"] = self.fdomain_in wfile = self.fdomain_out # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode="w") + f_out.to_netcdf(path=wfile, mode="w") logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) - f2.close() - f3.close() + f_in.close() + f_out.close() def create_surfdata_at_reg(self): #logging.debug ("Creating surface dataset file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) logging.info("Creating surface dataset file at region:"+ self.tag) # create 1d coordinate variables to enable sel() method filename = self.fsurf_in - f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") - lat = f2["lat"] - lon = f2["lon"] + f_in = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + lat = f_in["lat"] + lon = f_in["lon"] # subset longitude and latitude arrays xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3 = f2.isel(lsmlat=yind, lsmlon=xind) + f_out = f_in.isel(lsmlat=yind, lsmlon=xind) # update attributes - self.update_metadata(f3) - f3.attrs["Created_from"] = self.fsurf_in + self.update_metadata(f_out) + f_out.attrs["Created_from"] = self.fsurf_in # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode="w") + f_out.to_netcdf(path=self.fsurf_out, mode="w") logging.info("created file (fsurf_out)" + self.fsurf_out) # f1.close(); - f2.close() - f3.close() + f_in.close() + f_out.close() def create_landuse_at_reg(self): #logging.debug ("Creating landuse file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) logging.info("Creating landuse file at region:"+ self.tag) # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") - lat = f2["lat"] - lon = f2["lon"] + f_in = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + lat = f_in["lat"] + lon = f_in["lon"] # subset longitude and latitude arrays xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f3 = f2.isel(lsmlat=yind, lsmlon=xind) + f_out = f_in.isel(lsmlat=yind, lsmlon=xind) # update attributes - self.update_metadata(f3) - f3.attrs["Created_from"] = self.fluse_in + self.update_metadata(f_out) + f_out.attrs["Created_from"] = self.fluse_in wfile = self.fluse_out # mode 'w' 
overwrites file
-        f3.to_netcdf(path=wfile, mode="w")
+        f_out.to_netcdf(path=wfile, mode="w")
         logging.info("Successfully created file (fluse_out)" + self.fluse_out)
-        f2.close()
-        f3.close()
+        f_in.close()
+        f_out.close()
+
+
diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py
index 5061e32dfa..ccdab772ff 100644
--- a/python/ctsm/site_and_regional/single_point_case.py
+++ b/python/ctsm/site_and_regional/single_point_case.py
@@ -1,13 +1,12 @@
 """
-This module includes the definition for singlepoint class.
+This module includes the definition for the SinglePointCase class.
 """
 
 #-- Import libraries
+#-- Import Python Standard Libraries
 import os
 import logging
 
-from datetime import date
-
 #-- 3rd party libraries
 import numpy as np
 import xarray as xr
@@ -19,18 +18,36 @@
 
 class SinglePointCase(BaseCase):
     """
-    A case to encapsulate single point cases.
+    A class to encapsulate everything for single point cases.
 
     ...
 
     Attributes
    ----------
     plat : float
-        latitude
+        latitude of the single point
     plon : float
-        longitude
+        longitude of the single point
     site_name: str -- default = None
         Site name
+    create_domain : bool
+        flag for creating domain file
+    create_surfdata : bool
+        flag for creating surface dataset
+    create_landuse : bool
+        flag for creating landuse file
+    create_datm : bool
+        flag for creating DATM files
+    overwrite_single_pft : bool
+        flag to overwrite the whole grid 100% single PFT.
+    dominant_pft : int
+        dominant pft type for this single point
+    zero_nonveg_landunits : bool
+        flag for setting all non-vegetation landunits to zero
+    uniform_snowpack : bool
+        flag for creating datasets using uniform snowpack
+    saturation_excess : bool
+        flag for making dataset using saturation excess
+ """ logging.info("----------------------------------------------------------------------") logging.info("Creating domain file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") + f_in = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") # extract gridcell closest to plon/plat - f3 = f2.sel(ni=self.plon, nj=self.plat, method="nearest") + f_out = f_in.sel(ni=self.plon, nj=self.plat, method="nearest") # expand dimensions - f3 = f3.expand_dims(["nj", "ni"]) + f_out = f_out.expand_dims(["nj", "ni"]) # update attributes - self.update_metadata(f3) - f3.attrs["Created_from"] = self.fdomain_in + self.update_metadata(f_out) + f_out.attrs["Created_from"] = self.fdomain_in wfile = self.fdomain_out - f3.to_netcdf(path=wfile, mode="w") + f_out.to_netcdf(path=wfile, mode="w") logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) - f2.close() - f3.close() + f_in.close() + f_out.close() def create_landuse_at_point(self): + """ + Create landuse file at a single point. + """ logging.info("----------------------------------------------------------------------") logging.info("Creating landuse file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + f_in = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat - f3 = f2.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") + f_out = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") # expand dimensions - f3 = f3.expand_dims(["lsmlat", "lsmlon"]) + f_out = f_out.expand_dims(["lsmlat", "lsmlon"]) # specify dimension order - # f3 = f3.transpose('time','lat','lon') - f3 = f3.transpose(u"time", u"cft", u"natpft", u"lsmlat", u"lsmlon") - # f3['YEAR'] = f3['YEAR'].squeeze() + # f_out = f_out.transpose('time','lat','lon') + f_out = f_out.transpose(u"time", u"cft", u"natpft", u"lsmlat", u"lsmlon") + # f_out['YEAR'] = f_out['YEAR'].squeeze() # revert expand dimensions of YEAR - year = np.squeeze(np.asarray(f3["YEAR"])) - x = xr.DataArray(year, coords={"time": f3["time"]}, dims="time", name="YEAR") + year = np.squeeze(np.asarray(f_out["YEAR"])) + x = xr.DataArray(year, coords={"time": f_out["time"]}, dims="time", name="YEAR") x.attrs["units"] = "unitless" x.attrs["long_name"] = "Year of PFT data" - f3["YEAR"] = x + f_out["YEAR"] = x # update attributes - self.update_metadata(f3) - f3.attrs["Created_from"] = self.fluse_in + self.update_metadata(f_out) + f_out.attrs["Created_from"] = self.fluse_in wfile = self.fluse_out # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode="w") + f_out.to_netcdf(path=wfile, mode="w") logging.info("Successfully created file (luse_out)" + self.fluse_out+ ".") - f2.close() - f3.close() + f_in.close() + f_out.close() def create_surfdata_at_point(self): + """ + Create surface data file at a single point. 
+ """ logging.info("----------------------------------------------------------------------") logging.info("Creating surface dataset file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") # create 1d coordinate variables to enable sel() method filename = self.fsurf_in - f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + f_in = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat - f3 = f2.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") + f_out = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") # expand dimensions - f3 = f3.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) + f_out = f_out.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) # modify surface data properties if self.overwrite_single_pft: - f3["PCT_NAT_PFT"][:, :, :] = 0 - f3["PCT_NAT_PFT"][:, :, self.dominant_pft] = 100 + f_out["PCT_NAT_PFT"][:, :, :] = 0 + f_out["PCT_NAT_PFT"][:, :, self.dominant_pft] = 100 if self.zero_nonveg_landunits: - f3["PCT_NATVEG"][:, :] = 100 - f3["PCT_CROP"][:, :] = 0 - f3["PCT_LAKE"][:, :] = 0.0 - f3["PCT_WETLAND"][:, :] = 0.0 - f3["PCT_URBAN"][ + f_out["PCT_NATVEG"][:, :] = 100 + f_out["PCT_CROP"][:, :] = 0 + f_out["PCT_LAKE"][:, :] = 0.0 + f_out["PCT_WETLAND"][:, :] = 0.0 + f_out["PCT_URBAN"][ :, :, ] = 0.0 - f3["PCT_GLACIER"][:, :] = 0.0 + f_out["PCT_GLACIER"][:, :] = 0.0 if self.uniform_snowpack: - f3["STD_ELEV"][:, :] = 20.0 + f_out["STD_ELEV"][:, :] = 20.0 if not self.saturation_excess: - f3["FMAX"][:, :] = 0.0 + f_out["FMAX"][:, :] = 0.0 # specify dimension order - # f3 = f3.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') - f3 = f3.transpose( + # f_out = f_out.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') + f_out = f_out.transpose( u"time", u"cft", u"lsmpft", @@ -184,53 +252,53 @@ def create_surfdata_at_point(self): ) # update attributes - self.update_metadata(f3) - f3.attrs["Created_from"] = self.fsurf_in - del f3.attrs["History_Log"] + self.update_metadata(f_out) + f_out.attrs["Created_from"] = self.fsurf_in + del f_out.attrs["History_Log"] # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode="w") + f_out.to_netcdf(path=self.fsurf_out, mode="w") logging.info("Successfully created file (fsurf_out) :" + self.fsurf_out) - f2.close() - f3.close() + f_in.close() + f_out.close() def create_datmdomain_at_point(self): logging.info("----------------------------------------------------------------------") logging.info("Creating DATM domain file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") # create 1d coordinate variables to enable sel() method filename = self.fdatmdomain_in - f2 = self.create_1d_coord(filename, "xc", "yc", "ni", "nj") + f_in = self.create_1d_coord(filename, "xc", "yc", "ni", "nj") # extract gridcell closest to plon/plat - f3 = f2.sel(ni=self.plon, nj=self.plat, method="nearest") + f_out = f_in.sel(ni=self.plon, nj=self.plat, method="nearest") # expand dimensions - f3 = f3.expand_dims(["nj", "ni"]) + f_out = f_out.expand_dims(["nj", "ni"]) wfile = self.fdatmdomain_out # update attributes - self.update_metadata(f3) - f3.attrs["Created_from"] = self.fdatmdomain_in + self.update_metadata(f_out) + f_out.attrs["Created_from"] = self.fdatmdomain_in # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode="w") + f_out.to_netcdf(path=wfile, mode="w") logging.info("Successfully created file (fdatmdomain_out) :" + self.fdatmdomain_out) - f2.close() - f3.close() + f_in.close() + f_out.close() def extract_datm_at(self, file_in, 
file_out): # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(file_in, "LONGXY", "LATIXY", "lon", "lat") + f_in = self.create_1d_coord(file_in, "LONGXY", "LATIXY", "lon", "lat") # extract gridcell closest to plon/plat - f3 = f2.sel(lon=self.plon, lat=self.plat, method="nearest") + f_out = f_in.sel(lon=self.plon, lat=self.plat, method="nearest") # expand dimensions - f3 = f3.expand_dims(["lat", "lon"]) + f_out = f_out.expand_dims(["lat", "lon"]) # specify dimension order - f3 = f3.transpose(u"scalar", "time", "lat", "lon") + f_out = f_out.transpose(u"scalar", "time", "lat", "lon") # update attributes - self.update_metadata(f3) - f3.attrs["Created_from"] = file_in + self.update_metadata(f_out) + f_out.attrs["Created_from"] = file_in # mode 'w' overwrites file - f3.to_netcdf(path=file_out, mode="w") + f_out.to_netcdf(path=file_out, mode="w") logging.info("Successfully created file :" + file_out) - f2.close() - f3.close() + f_in.close() + f_out.close() def create_datm_at_point(self): logging.info("----------------------------------------------------------------------") diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index b9f562d793..e226930182 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -239,6 +239,17 @@ def get_parser(): type=str, default="", ) + rg_parser.add_argument( + "--create-mesh", + help="Flag for subsetting mesh file. [default: %(default)s]", + action="store", + dest="create_mesh", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) # -- common options between both subparsers for subparser in [pt_parser, rg_parser]: @@ -536,8 +547,6 @@ def main(): single_point.fsurf_in = fsurf_in single_point.fsurf_out = fsurf_out - print("fsurf_in : " + fsurf_in) # - print("fsurf_out : " + fsurf_out) # logging.info("fsurf_in : " + fsurf_in) # logging.info("fsurf_out : " + fsurf_out) # @@ -558,8 +567,7 @@ def main(): )) single_point.fluse_in = fluse_in single_point.fluse_out = fluse_out - print("fluse_in : " + fluse_in) # - print("fluse_out : " + fluse_out) # + logging.info("fluse_in : " + fluse_in) # logging.info("fluse_out : " + fluse_out) # @@ -600,7 +608,7 @@ def main(): logging.info("Successfully ran script for single point.") exit() - elif args.run_type == "reg": + elif args.run_type == "region": logging.info( "----------------------------------------------------------------------------" ) @@ -627,6 +635,9 @@ def main(): # -- Create DATM atmospheric forcing data create_datm = args.create_datm + # -- Create mesh file + create_mesh = args.create_mesh + crop_flag = args.crop_flag reg_name = args.reg_name From 97f52b9861bcd94017adeec32cfadc9100336a4a Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 14 Dec 2021 08:28:09 -0700 Subject: [PATCH 036/223] formatting changes --- python/ctsm/subset_data.py | 41 +++++++++++++++----------------------- 1 file changed, 16 insertions(+), 25 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index e226930182..dd9dd4c5ec 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -71,37 +71,35 @@ # -[] Automatic downloading of missing files if they are missing # default 78 pft vs 16 pft -#-- Import libraries +# -- Import libraries -#-- standard libraries +# -- standard libraries import os -import sys -import string import logging import argparse import textwrap -import subprocess from datetime import date from getpass import getuser from argparse import ArgumentParser, 
ArgumentDefaultsHelpFormatter -#-- 3rd party libraries +# -- 3rd party libraries import numpy as np import xarray as xr -#-- import local classes for this script +# -- import local classes for this script from ctsm.site_and_regional.base_case import BaseCase from ctsm.site_and_regional.single_point_case import SinglePointCase from ctsm.site_and_regional.regional_case import RegionalCase from ctsm.utils import str2bool -#-- import ctsm logging flags +# -- import ctsm logging flags from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args logger = logging.getLogger(__name__) + def get_parser(): """ Get the parser object for subset_data.py script. @@ -249,7 +247,7 @@ def get_parser(): const=True, required=False, default=False, - ) + ) # -- common options between both subparsers for subparser in [pt_parser, rg_parser]: @@ -411,7 +409,6 @@ def plon_type(x): def main(): - # -- add logging flags from ctsm_logging setup_logging_pre_config() parser = get_parser() @@ -421,9 +418,6 @@ def main(): process_logging_args(args) - today = date.today() - today_string = today.strftime("%Y%m%d") - myname = getuser() pwd = os.getcwd() @@ -518,7 +512,7 @@ def main(): fdomain_in = os.path.join( dir_inputdata, "share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc" ) - fdomain_out = os.path.join(dir_output , single_point.add_tag_to_filename( + fdomain_out = os.path.join(dir_output, single_point.add_tag_to_filename( fdomain_in, single_point.tag )) @@ -540,7 +534,7 @@ def main(): "lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc", ) - fsurf_out = os.path.join(dir_output , single_point.add_tag_to_filename( + fsurf_out = os.path.join(dir_output, single_point.add_tag_to_filename( fsurf_in, single_point.tag )) @@ -562,7 +556,7 @@ def main(): "lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc", ) # fluse_out = dir_output + single_point.add_tag_to_filename( fluse_in, single_point.tag ) # remove resolution from filename for singlept cases - fluse_out = os.path.join( dir_output , single_point.add_tag_to_filename( + fluse_out = os.path.join(dir_output, single_point.add_tag_to_filename( fluse_in, single_point.tag )) single_point.fluse_in = fluse_in @@ -576,7 +570,7 @@ def main(): dir_clm_forcedata, "atm_forcing.datm7.GSWP3.0.5d.v1.c170516/domain.lnd.360x720_gswp3.0v1.c170606.nc", ) - fdatmdomain_out = os.path.join(dir_output_datm , single_point.add_tag_to_filename( + fdatmdomain_out = os.path.join(dir_output_datm, single_point.add_tag_to_filename( fdatmdomain_in, single_point.tag )) single_point.fdatmdomain_in = fdatmdomain_in @@ -612,7 +606,7 @@ def main(): logging.info( "----------------------------------------------------------------------------" ) - logging.info( + logging.info( "This script extracts a single point from the global CTSM inputdata datasets." ) @@ -677,12 +671,9 @@ def main(): # -- Specify land domain file --------------------------------- fdomain_in = os.path.join( - dir_inputdata , "share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc" - ) - fdomain_out = os.path.join( - dir_output , "domain.lnd.fv1.9x2.5_gx1v7." 
+ region.tag + "_170518.nc" + dir_inputdata, "share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc" ) - fdomain_out = os.path.join(dir_output , region.add_tag_to_filename( + fdomain_out = os.path.join(dir_output, region.add_tag_to_filename( fdomain_in, region.tag )) @@ -696,7 +687,7 @@ def main(): dir_inputdata , "lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc" ) - fsurf_out = os.path.join(dir_output , region.add_tag_to_filename( + fsurf_out = os.path.join(dir_output, region.add_tag_to_filename( fsurf_in, region.tag )) @@ -710,7 +701,7 @@ def main(): dir_inputdata , "lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc" ) - fluse_out = os.path.join(dir_output , region.add_tag_to_filename( + fluse_out = os.path.join(dir_output, region.add_tag_to_filename( fluse_in, region.tag )) region.fluse_in = fluse_in From 04b38a5037e812c15fa600ed8d1ceec1987d450c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 14 Dec 2021 08:59:00 -0700 Subject: [PATCH 037/223] more changes to docstrings. --- .../ctsm/site_and_regional/regional_case.py | 47 +++++++++++++---- .../site_and_regional/single_point_case.py | 51 +++++++++---------- python/ctsm/subset_data.py | 26 ++++++---- 3 files changed, 77 insertions(+), 47 deletions(-) diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index ef6a978a6e..d8905e08bf 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -16,19 +16,45 @@ class RegionalCase(BaseCase): """ - A case to encapsulate regional cases. + A class to encapsulate regional cases. ... Attributes ---------- - plat : float - latitude - plon : float - longitude - site_name: str -- default = None - Site name - - + lat1 : float + first (left) latitude of a region. + lat1 : float + second (right) latitude of a region. + lon1 : float + first (bottom) longitude of a region. + lon2 : float + second (top) longitude of a region. + reg_name: str -- default = None + Region's name + create_domain : bool + flag for creating domain file + create_surfdata : bool + flag for creating surface dataset + create_landuse : bool + flag for creating landuse file + create_datm : bool + flag for creating DATM files + + Methods + ------- + create_tag + Create a tag for this region which is either + region's name or a combination of bounds of this + region lat1-lat2_lon1-lon2 + + create_domain_at_reg + Create domain file at this region + + create_surfdata_at_reg + Create surface dataset at this region + + create_landuse_at_reg + Create landuse file at this region """ @@ -44,6 +70,9 @@ def __init__( create_landuse, create_datm, ): + """ + Initializes SinglePointCase with the given arguments. + """ super().__init__(create_domain, create_surfdata, create_landuse, create_datm) self.lat1 = lat1 self.lat2 = lat2 diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index ccdab772ff..b63d4adfaf 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -92,33 +92,6 @@ def __init__( """ Initializes SinglePointCase with the given arguments. 
- Parameters - ---------- - plat : float - latitude of the single point - plon : float - longitude of the single point - site_name: str -- default = None - Site name - create_domain : bool - flag for creating domain file - create_surfdata : bool - flag for creating surface dataset - create_landuse : bool - flag for creating landuse file - create_datm : bool - flag for creating DATM files - overwrite_single_pft : bool - flag to overwrite the whole grid 100% single PFT. - dominant_pft : int - dominant pft type for this single point - zero_nonveg_landunits : bool - flag for setting all non-vegetation landunits to zero - overwrite_single_pft : bool - flag for creating datasets using uniform snowpack - saturation_excess : bool - flag for making dataset using saturation excess - """ super().__init__(create_domain, create_surfdata, create_landuse, create_datm) @@ -148,10 +121,13 @@ def create_domain_at_point(self): """ logging.info("----------------------------------------------------------------------") logging.info("Creating domain file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") + # extract gridcell closest to plon/plat f_out = f_in.sel(ni=self.plon, nj=self.plat, method="nearest") + # expand dimensions f_out = f_out.expand_dims(["nj", "ni"]) @@ -171,13 +147,16 @@ def create_landuse_at_point(self): """ logging.info("----------------------------------------------------------------------") logging.info("Creating landuse file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + # extract gridcell closest to plon/plat f_out = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") # expand dimensions f_out = f_out.expand_dims(["lsmlat", "lsmlon"]) + # specify dimension order # f_out = f_out.transpose('time','lat','lon') f_out = f_out.transpose(u"time", u"cft", u"natpft", u"lsmlat", u"lsmlon") @@ -207,11 +186,14 @@ def create_surfdata_at_point(self): """ logging.info("----------------------------------------------------------------------") logging.info("Creating surface dataset file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + # create 1d coordinate variables to enable sel() method filename = self.fsurf_in f_in = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + # extract gridcell closest to plon/plat f_out = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") + # expand dimensions f_out = f_out.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) @@ -262,19 +244,27 @@ def create_surfdata_at_point(self): f_out.close() def create_datmdomain_at_point(self): + """ + Create DATM domain file at a single point + """ logging.info("----------------------------------------------------------------------") logging.info("Creating DATM domain file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + # create 1d coordinate variables to enable sel() method filename = self.fdatmdomain_in f_in = self.create_1d_coord(filename, "xc", "yc", "ni", "nj") + # extract gridcell closest to plon/plat f_out = f_in.sel(ni=self.plon, nj=self.plat, method="nearest") + # expand dimensions f_out = f_out.expand_dims(["nj", "ni"]) wfile = self.fdatmdomain_out + # update attributes self.update_metadata(f_out) f_out.attrs["Created_from"] = self.fdatmdomain_in + # mode 'w' overwrites file 
f_out.to_netcdf(path=wfile, mode="w") logging.info("Successfully created file (fdatmdomain_out) :" + self.fdatmdomain_out) @@ -284,16 +274,20 @@ def create_datmdomain_at_point(self): def extract_datm_at(self, file_in, file_out): # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(file_in, "LONGXY", "LATIXY", "lon", "lat") + # extract gridcell closest to plon/plat f_out = f_in.sel(lon=self.plon, lat=self.plat, method="nearest") + # expand dimensions f_out = f_out.expand_dims(["lat", "lon"]) + # specify dimension order f_out = f_out.transpose(u"scalar", "time", "lat", "lon") # update attributes self.update_metadata(f_out) f_out.attrs["Created_from"] = file_in + # mode 'w' overwrites file f_out.to_netcdf(path=file_out, mode="w") logging.info("Successfully created file :" + file_out) @@ -301,6 +295,9 @@ def extract_datm_at(self, file_in, file_out): f_out.close() def create_datm_at_point(self): + """ + Create all DATM dataset at a point. + """ logging.info("----------------------------------------------------------------------") logging.info("Creating DATM files at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") # -- specify subdirectory names and filename prefixes diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index dd9dd4c5ec..6c8cd5c9e0 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -436,7 +436,7 @@ def main(): plon = args.plon plat = args.plat - # -- Create regional CLM domain file + # -- Create single point CLM domain file create_domain = args.create_domain # -- Create CLM surface data file @@ -505,8 +505,8 @@ def main(): if not os.path.isdir(dir_output_datm): os.mkdir(dir_output_datm) - logging.info("dir_input_datm : " + dir_input_datm) # - logging.info("dir_output_datm : " + dir_output_datm) # + logging.info("dir_input_datm : " + dir_input_datm) + logging.info("dir_output_datm : " + dir_output_datm) # -- Specify land domain file --------------------------------- fdomain_in = os.path.join( @@ -519,8 +519,8 @@ def main(): single_point.fdomain_in = fdomain_in single_point.fdomain_out = fdomain_out - logging.info("fdomain_in : " + fdomain_in) # - logging.info("fdomain_out : " + fdomain_out) # + logging.info("fdomain_in : " + fdomain_in) + logging.info("fdomain_out : " + fdomain_out) # -- Specify surface data file -------------------------------- if crop_flag: @@ -541,8 +541,8 @@ def main(): single_point.fsurf_in = fsurf_in single_point.fsurf_out = fsurf_out - logging.info("fsurf_in : " + fsurf_in) # - logging.info("fsurf_out : " + fsurf_out) # + logging.info("fsurf_in : " + fsurf_in) + logging.info("fsurf_out : " + fsurf_out) # -- Specify landuse file ------------------------------------- if crop_flag: @@ -562,8 +562,8 @@ def main(): single_point.fluse_in = fluse_in single_point.fluse_out = fluse_out - logging.info("fluse_in : " + fluse_in) # - logging.info("fluse_out : " + fluse_out) # + logging.info("fluse_in : " + fluse_in) + logging.info("fluse_out : " + fluse_out) # -- Specify datm domain file --------------------------------- fdatmdomain_in = os.path.join( @@ -575,8 +575,9 @@ def main(): )) single_point.fdatmdomain_in = fdatmdomain_in single_point.fdatmdomain_out = fdatmdomain_out - logging.info("fdatmdomain_in : " + fdatmdomain_in) # - logging.info("fdatmdomain out : " + fdatmdomain_out) # + + logging.info("fdatmdomain_in : " + fdatmdomain_in) + logging.info("fdatmdomain out : " + fdatmdomain_out) # -- Create CTSM domain file if create_domain: @@ -679,6 +680,7 @@ def main(): region.fdomain_in = 
fdomain_in
         region.fdomain_out = fdomain_out
+
         logging.info("fdomain_in : " + fdomain_in)
         logging.info("fdomain_out : " + fdomain_out)

@@ -693,6 +695,7 @@ def main():

         region.fsurf_in = fsurf_in
         region.fsurf_out = fsurf_out
+
         logging.info("fsurf_in : " + fsurf_in)
         logging.info("fsurf_out : " + fsurf_out)

@@ -706,6 +709,7 @@ def main():
         region.fluse_in = fluse_in
         region.fluse_out = fluse_out
+
         logging.info("fluse_in : " + fluse_in)
         logging.info("fluse_out : " + fluse_out)

From 47515a927b2ae2ff6cd5b2964419e8dca0c180d0 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Tue, 14 Dec 2021 09:22:30 -0700
Subject: [PATCH 038/223] more changes and reformatting.

---
 python/ctsm/ctsm_logging.py                   | 10 +++
 python/ctsm/git_utils.py                      | 32 ++++---
 python/ctsm/site_and_regional/base_case.py    | 18 ++--
 .../ctsm/site_and_regional/regional_case.py   | 29 +++----
 .../site_and_regional/single_point_case.py    | 84 ++++++++++++++-----
 python/ctsm/subset_data.py                    | 58 +++++++------
 python/ctsm/utils.py                          |  4 +-
 7 files changed, 145 insertions(+), 90 deletions(-)

diff --git a/python/ctsm/ctsm_logging.py b/python/ctsm/ctsm_logging.py
index 7d63b9b463..1b75154ff7 100644
--- a/python/ctsm/ctsm_logging.py
+++ b/python/ctsm/ctsm_logging.py
@@ -77,3 +77,13 @@ def process_logging_args(args):
         root_logger.setLevel(logging.INFO)
     else:
         root_logger.setLevel(logging.WARNING)
+
+def output_to_file(filepath, message, log_to_logger=False):
+    """
+    helper function to write to a log file.
+    """
+    with open(filepath, 'a') as fl:
+        fl.write(message)
+    if log_to_logger:
+        logger.info(message)
+
diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py
index e676f3ccaf..651051800e 100644
--- a/python/ctsm/git_utils.py
+++ b/python/ctsm/git_utils.py
@@ -5,36 +5,32 @@

 logger = logging.getLogger(__name__)

-def get_git_short_hash():
-    """
+
+def get_git_short_hash():
+    """
     Returns Git short SHA for the current directory.
-    """
-    try:
-
+    """
+    try:
+
         # os.abspath(__file__)
-        sha = (
+        sha = (
             subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
             .strip()
             .decode()
-        )
+        )
     except subprocess.CalledProcessError:
         sha = "NOT-A-GIT-REPOSITORY"
     return sha

-def get_git_long_hash():
-    """
+def get_git_long_hash():
+    """
     Returns Git long SHA for the current directory.
-    """
-    try:
-
+    """
+    try:
+
         # os.abspath(__file__)
-        sha = (
-            subprocess.check_output(["git", "rev-parse", "HEAD"])
-            .strip()
-            .decode()
-        )
+        sha = subprocess.check_output(["git", "rev-parse", "HEAD"]).strip().decode()
     except subprocess.CalledProcessError:
         sha = "NOT-A-GIT-REPOSITORY"
     return sha
-
diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py
index 283da366a9..fad8a4b6ed 100644
--- a/python/ctsm/site_and_regional/base_case.py
+++ b/python/ctsm/site_and_regional/base_case.py
@@ -3,9 +3,9 @@ and RegionalCase.
 The common functionalities of SinglePointCase and
 RegionalCase are defined in this Class.
""" -#-- Import libraries +# -- Import libraries -#-- standard libraries +# -- standard libraries import os import logging import subprocess @@ -13,15 +13,16 @@ from datetime import date from getpass import getuser -#-- 3rd party libraries +# -- 3rd party libraries import numpy as np import xarray as xr -#-- import local classes for this script +# -- import local classes for this script from ctsm.git_utils import get_git_short_hash logger = logging.getLogger(__name__) + class BaseCase: """ Parent class to SinglePointCase and RegionalCase @@ -103,7 +104,7 @@ def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): Raises ------ - None + None Returns ------- @@ -152,7 +153,9 @@ def add_tag_to_filename(filename, tag): if basename[cend] == "c": cend = cend - 1 if (basename[cend] != ".") and (basename[cend] != "_"): - logging.error("Trouble figuring out where to add tag to filename:" + filename) + logging.error( + "Trouble figuring out where to add tag to filename:" + filename + ) os.abort() today = date.today() today_string = today.strftime("%y%m%d") @@ -204,9 +207,8 @@ def update_metadata(self, nc): for attr in del_attrs: if attr in attr_list: - logging.debug ("This attr should be deleted : "+ attr) + logging.debug("This attr should be deleted : " + attr) del nc.attrs[attr] # for attr, value in attr_list.items(): # print (attr + " = "+str(value)) - diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index d8905e08bf..708e29befb 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -1,19 +1,20 @@ """ This module includes the definition for a RegionalCase classs. """ -#-- Import libraries -#-- Import Python Standard Libraries +# -- Import libraries +# -- Import Python Standard Libraries import logging -#-- 3rd party libraries +# -- 3rd party libraries import numpy as np import xarray as xr -#-- import local classes for this script +# -- import local classes for this script from ctsm.site_and_regional.base_case import BaseCase logger = logging.getLogger(__name__) + class RegionalCase(BaseCase): """ A class to encapsulate regional cases. @@ -70,7 +71,7 @@ def __init__( create_landuse, create_datm, ): - """ + """ Initializes SinglePointCase with the given arguments. 
""" super().__init__(create_domain, create_surfdata, create_landuse, create_datm) @@ -95,8 +96,8 @@ def create_tag(self): ) def create_domain_at_reg(self): - #logging.debug ("Creating domain file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) - logging.info("Creating domain file at region:"+ self.tag) + # logging.debug ("Creating domain file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + logging.info("Creating domain file at region:" + self.tag) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") lat = f_in["lat"] @@ -118,8 +119,8 @@ def create_domain_at_reg(self): f_out.close() def create_surfdata_at_reg(self): - #logging.debug ("Creating surface dataset file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) - logging.info("Creating surface dataset file at region:"+ self.tag) + # logging.debug ("Creating surface dataset file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + logging.info("Creating surface dataset file at region:" + self.tag) # create 1d coordinate variables to enable sel() method filename = self.fsurf_in f_in = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") @@ -142,10 +143,12 @@ def create_surfdata_at_reg(self): f_out.close() def create_landuse_at_reg(self): - #logging.debug ("Creating landuse file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) - logging.info("Creating landuse file at region:"+ self.tag) + # logging.debug ("Creating landuse file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + logging.info("Creating landuse file at region:" + self.tag) # create 1d coordinate variables to enable sel() method - f_in = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + f_in = self.create_1d_coord( + self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat" + ) lat = f_in["lat"] lon = f_in["lon"] # subset longitude and latitude arrays @@ -163,5 +166,3 @@ def create_landuse_at_reg(self): logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) f_in.close() f_out.close() - - diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index b63d4adfaf..3149416b0c 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -2,20 +2,21 @@ This module includes the definition for SinglePointCase class. """ -#-- Import libraries -#-- Import Python Standard Libraries +# -- Import libraries +# -- Import Python Standard Libraries import os import logging -#-- 3rd party libraries +# -- 3rd party libraries import numpy as np import xarray as xr -#-- import local classes for this script +# -- import local classes for this script from ctsm.site_and_regional.base_case import BaseCase logger = logging.getLogger(__name__) + class SinglePointCase(BaseCase): """ A class to encapsulate everything for single point cases. @@ -89,7 +90,7 @@ def __init__( uniform_snowpack, saturation_excess, ): - """ + """ Initializes SinglePointCase with the given arguments. 
""" @@ -114,13 +115,20 @@ def create_tag(self): else: self.tag = str(self.plon) + "_" + str(self.plat) - def create_domain_at_point(self): """ Create domain file for this SinglePointCase class. """ - logging.info("----------------------------------------------------------------------") - logging.info("Creating domain file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + logging.info( + "----------------------------------------------------------------------" + ) + logging.info( + "Creating domain file at " + + self.plon.__str__() + + " " + + self.plat.__str__() + + "." + ) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") @@ -145,11 +153,21 @@ def create_landuse_at_point(self): """ Create landuse file at a single point. """ - logging.info("----------------------------------------------------------------------") - logging.info("Creating landuse file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + logging.info( + "----------------------------------------------------------------------" + ) + logging.info( + "Creating landuse file at " + + self.plon.__str__() + + " " + + self.plat.__str__() + + "." + ) # create 1d coordinate variables to enable sel() method - f_in = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + f_in = self.create_1d_coord( + self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat" + ) # extract gridcell closest to plon/plat f_out = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") @@ -176,7 +194,7 @@ def create_landuse_at_point(self): wfile = self.fluse_out # mode 'w' overwrites file f_out.to_netcdf(path=wfile, mode="w") - logging.info("Successfully created file (luse_out)" + self.fluse_out+ ".") + logging.info("Successfully created file (luse_out)" + self.fluse_out + ".") f_in.close() f_out.close() @@ -184,8 +202,16 @@ def create_surfdata_at_point(self): """ Create surface data file at a single point. """ - logging.info("----------------------------------------------------------------------") - logging.info("Creating surface dataset file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + logging.info( + "----------------------------------------------------------------------" + ) + logging.info( + "Creating surface dataset file at " + + self.plon.__str__() + + " " + + self.plat.__str__() + + "." + ) # create 1d coordinate variables to enable sel() method filename = self.fsurf_in @@ -247,8 +273,16 @@ def create_datmdomain_at_point(self): """ Create DATM domain file at a single point """ - logging.info("----------------------------------------------------------------------") - logging.info("Creating DATM domain file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + logging.info( + "----------------------------------------------------------------------" + ) + logging.info( + "Creating DATM domain file at " + + self.plon.__str__() + + " " + + self.plat.__str__() + + "." + ) # create 1d coordinate variables to enable sel() method filename = self.fdatmdomain_in @@ -267,7 +301,9 @@ def create_datmdomain_at_point(self): # mode 'w' overwrites file f_out.to_netcdf(path=wfile, mode="w") - logging.info("Successfully created file (fdatmdomain_out) :" + self.fdatmdomain_out) + logging.info( + "Successfully created file (fdatmdomain_out) :" + self.fdatmdomain_out + ) f_in.close() f_out.close() @@ -298,8 +334,16 @@ def create_datm_at_point(self): """ Create all DATM dataset at a point. 
""" - logging.info("----------------------------------------------------------------------") - logging.info("Creating DATM files at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + logging.info( + "----------------------------------------------------------------------" + ) + logging.info( + "Creating DATM files at " + + self.plon.__str__() + + " " + + self.plat.__str__() + + "." + ) # -- specify subdirectory names and filename prefixes solrdir = "Solar/" precdir = "Precip/" @@ -339,4 +383,4 @@ def create_datm_at_point(self): file_out = outfile[n] self.extract_datm_at(file_in, file_out) - logging.info("All DATM files are created in: "+ self.dir_output_datm+".") + logging.info("All DATM files are created in: " + self.dir_output_datm + ".") diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 6c8cd5c9e0..d1745b11c6 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -95,7 +95,11 @@ from ctsm.utils import str2bool # -- import ctsm logging flags -from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args +from ctsm.ctsm_logging import ( + setup_logging_pre_config, + add_logging_args, + process_logging_args, +) logger = logging.getLogger(__name__) @@ -512,9 +516,9 @@ def main(): fdomain_in = os.path.join( dir_inputdata, "share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc" ) - fdomain_out = os.path.join(dir_output, single_point.add_tag_to_filename( - fdomain_in, single_point.tag - )) + fdomain_out = os.path.join( + dir_output, single_point.add_tag_to_filename(fdomain_in, single_point.tag) + ) single_point.fdomain_in = fdomain_in single_point.fdomain_out = fdomain_out @@ -534,9 +538,9 @@ def main(): "lnd/clm2/surfdata_map/release-clm5.0.18/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc", ) - fsurf_out = os.path.join(dir_output, single_point.add_tag_to_filename( - fsurf_in, single_point.tag - )) + fsurf_out = os.path.join( + dir_output, single_point.add_tag_to_filename(fsurf_in, single_point.tag) + ) single_point.fsurf_in = fsurf_in single_point.fsurf_out = fsurf_out @@ -556,9 +560,9 @@ def main(): "lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc", ) # fluse_out = dir_output + single_point.add_tag_to_filename( fluse_in, single_point.tag ) # remove resolution from filename for singlept cases - fluse_out = os.path.join(dir_output, single_point.add_tag_to_filename( - fluse_in, single_point.tag - )) + fluse_out = os.path.join( + dir_output, single_point.add_tag_to_filename(fluse_in, single_point.tag) + ) single_point.fluse_in = fluse_in single_point.fluse_out = fluse_out @@ -570,9 +574,10 @@ def main(): dir_clm_forcedata, "atm_forcing.datm7.GSWP3.0.5d.v1.c170516/domain.lnd.360x720_gswp3.0v1.c170606.nc", ) - fdatmdomain_out = os.path.join(dir_output_datm, single_point.add_tag_to_filename( - fdatmdomain_in, single_point.tag - )) + fdatmdomain_out = os.path.join( + dir_output_datm, + single_point.add_tag_to_filename(fdatmdomain_in, single_point.tag), + ) single_point.fdatmdomain_in = fdatmdomain_in single_point.fdatmdomain_out = fdatmdomain_out @@ -601,7 +606,6 @@ def main(): single_point.create_datm_at_point() logging.info("Successfully ran script for single point.") - exit() elif args.run_type == "region": logging.info( @@ -674,9 +678,9 @@ def main(): fdomain_in = os.path.join( dir_inputdata, "share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc" ) - fdomain_out = os.path.join(dir_output, region.add_tag_to_filename( - fdomain_in, 
region.tag - )) + fdomain_out = os.path.join( + dir_output, region.add_tag_to_filename(fdomain_in, region.tag) + ) region.fdomain_in = fdomain_in region.fdomain_out = fdomain_out @@ -686,12 +690,12 @@ def main(): # -- Specify surface data file -------------------------------- fsurf_in = os.path.join( - dir_inputdata - , "lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc" + dir_inputdata, + "lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc", + ) + fsurf_out = os.path.join( + dir_output, region.add_tag_to_filename(fsurf_in, region.tag) ) - fsurf_out = os.path.join(dir_output, region.add_tag_to_filename( - fsurf_in, region.tag - )) region.fsurf_in = fsurf_in region.fsurf_out = fsurf_out @@ -701,12 +705,12 @@ def main(): # -- Specify landuse file ------------------------------------- fluse_in = os.path.join( - dir_inputdata - , "lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc" + dir_inputdata, + "lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc", + ) + fluse_out = os.path.join( + dir_output, region.add_tag_to_filename(fluse_in, region.tag) ) - fluse_out = os.path.join(dir_output, region.add_tag_to_filename( - fluse_in, region.tag - )) region.fluse_in = fluse_in region.fluse_out = fluse_out diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 8f0fe78400..5154420321 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -60,6 +60,4 @@ def str2bool(v): elif v.lower() in ("no", "false", "f", "n", "0"): return False else: - raise argparse.ArgumentTypeError( - "Boolean value expected. [true or false] or [y or n]" - ) + raise ValueError("Boolean value expected. [true or false] or [y or n]") From 420a0fb4aecbdfe66dc1736b8e31f1c48f17b947 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 14 Dec 2021 09:23:37 -0700 Subject: [PATCH 039/223] just reformatting. --- python/ctsm/test/test_unit_subset_data.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py index 06aa79740d..b972a20986 100644 --- a/python/ctsm/test/test_unit_subset_data.py +++ b/python/ctsm/test/test_unit_subset_data.py @@ -10,24 +10,25 @@ # to make readable unit test names # pylint: disable=invalid-name -class TestSubsetData(unittest.TestCase): +class TestSubsetData(unittest.TestCase): def test_plonType_positive(self): result = plon_type(30) - self.assertEqual(result, 30.) + self.assertEqual(result, 30.0) def test_plonType_negative(self): result = plon_type(-30) - self.assertEqual(result, 330.) + self.assertEqual(result, 330.0) def test_plonType_outOfBounds(self): - with self.assertRaisesRegex(argparse.ArgumentTypeError, - "Latitude.*should be between"): + with self.assertRaisesRegex( + argparse.ArgumentTypeError, "Latitude.*should be between" + ): _ = plon_type(361) + """Unit tests for subset_data """ -if __name__ == '__main__': +if __name__ == "__main__": unit_testing.setup_for_tests() unittest.main() - From 9f21042c5a2acfdb3eefdbcba88243bc132dec11 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 14 Dec 2021 09:59:55 -0700 Subject: [PATCH 040/223] formatting and adding some more git utils. 
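
This commit adds get_git_describe() alongside the existing hash helpers and
drops a few now-unused imports from the site_and_regional modules. For
reference, a minimal usage sketch of the ctsm.git_utils helpers (the return
values shown are made-up examples, and a git checkout is assumed as the
working directory):

    from ctsm.git_utils import get_git_short_hash, get_git_long_hash, get_git_describe

    short_sha = get_git_short_hash()  # e.g. "9f21042"
    long_sha = get_git_long_hash()    # e.g. "9f21042c5a2acfdb3eefdbcba88243bc132dec11"
    label = get_git_describe()        # e.g. "branch_tools-42-g9f21042"

Note that the two hash helpers catch subprocess.CalledProcessError and fall
back to the sentinel string "NOT-A-GIT-REPOSITORY", while get_git_describe
has no such guard and will raise outside a git repository.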
--- python/ctsm/git_utils.py | 7 +++++++ python/ctsm/site_and_regional/base_case.py | 1 - python/ctsm/site_and_regional/regional_case.py | 1 - python/ctsm/site_and_regional/single_point_case.py | 1 - 4 files changed, 7 insertions(+), 3 deletions(-) diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py index 651051800e..ac98b7ec92 100644 --- a/python/ctsm/git_utils.py +++ b/python/ctsm/git_utils.py @@ -34,3 +34,10 @@ def get_git_long_hash(): except subprocess.CalledProcessError: sha = "NOT-A-GIT-REPOSITORY" return sha + +def get_git_describe(): + """ + Returns git describe output + """ + label = subprocess.check_output(["git", "describe"]).strip() + return label.decode() diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index fad8a4b6ed..2d1f8d0fa4 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -8,7 +8,6 @@ # -- standard libraries import os import logging -import subprocess from datetime import date from getpass import getuser diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 708e29befb..85c96c61b2 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -7,7 +7,6 @@ # -- 3rd party libraries import numpy as np -import xarray as xr # -- import local classes for this script from ctsm.site_and_regional.base_case import BaseCase diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 3149416b0c..147ea76f4c 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -4,7 +4,6 @@ # -- Import libraries # -- Import Python Standard Libraries -import os import logging # -- 3rd party libraries From f4fdf054993b7a6845959cd46ea6ff4ffc5cd848 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 14 Dec 2021 10:25:58 -0700 Subject: [PATCH 041/223] small changes. 
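
The only functional surface touched here is the git_utils module docstring,
which is what help() and introspection report to users; a quick sanity check
(assuming the ctsm package is importable):

    import ctsm.git_utils

    print(ctsm.git_utils.__doc__)
    # -> General-purpose git utility functions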
---
 python/ctsm/git_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py
index ac98b7ec92..694f97f1c9 100644
--- a/python/ctsm/git_utils.py
+++ b/python/ctsm/git_utils.py
@@ -1,4 +1,4 @@
-"""General-purpose utility functions"""
+"""General-purpose git utility functions"""
 import logging
 import subprocess

From 88d1c24f00ce151ba89c5b4ae8132f541a16e5d2 Mon Sep 17 00:00:00 2001
From: Adrianna Foster
Date: Tue, 14 Dec 2021 10:32:36 -0700
Subject: [PATCH 042/223] refactor to set up files/directories in one function

---
 python/ctsm/site_and_regional/base_case.py    |  27 +-
 .../ctsm/site_and_regional/regional_case.py   |  85 ++-
 .../site_and_regional/single_point_case.py    | 340 ++++++------
 python/ctsm/subset_data.py                    | 507 ++++++++----------
 4 files changed, 480 insertions(+), 479 deletions(-)

diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py
index 12197b2e12..bada3a5509 100644
--- a/python/ctsm/site_and_regional/base_case.py
+++ b/python/ctsm/site_and_regional/base_case.py
@@ -1,3 +1,6 @@
+"""
+Holds the class BaseCase, parent class to Regional and Single-Point cases
+"""
 import os
 import subprocess
 import logging
@@ -13,6 +16,7 @@

 logger = logging.getLogger(__name__)

+
 class BaseCase:
     """
     Parent class to SinglePointCase and RegionalCase
@@ -40,7 +44,8 @@ class BaseCase:
         updates metadata for a netcdf file and removes attributes that should not be there
     """

-    def __init__(self, create_domain, create_surfdata, create_landuse, create_datm, create_user_mods):
+    def __init__(self, create_domain, create_surfdata, create_landuse, create_datm,
+                 create_user_mods):
         self.create_domain = create_domain
         self.create_surfdata = create_surfdata
         self.create_landuse = create_landuse
@@ -78,22 +83,25 @@ def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim):
     def add_tag_to_filename(filename, tag):
         """
         Add a tag and replace timetag of a filename
-        # Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc
-        # Add the tag to just before that ending part
-        # and change the ending part to the current time tag
+        Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc
+        Add the tag to just before that ending part
+        and change the ending part to the current time tag
         """
         basename = os.path.basename(filename)
         cend = -10
         if basename[cend] == "c":
             cend = cend - 1
         if (basename[cend] != ".") and (basename[cend] != "_"):
-            logging.error("Trouble figuring out where to add tag to filename:" + filename)
+            logging.error("Trouble figuring out where to add tag to filename: %s", filename)
             os.abort()
         today = date.today()
         today_string = today.strftime("%y%m%d")
         return basename[:cend] + "_" + tag + "_c" + today_string + ".nc"

     def update_metadata(self, nc):
+        """
+        Updates the metadata for a subset netcdf file.
+        """
         # update attributes
         today = date.today()
         today_string = today.strftime("%Y-%m-%d")
@@ -120,7 +128,7 @@ def update_metadata(self, nc):

         for attr in del_attrs:
             if attr in attr_list:
-                logging.debug ("This attr should be deleted : "+ attr)
+                logging.debug("This attr should be deleted : " + attr)
                 del nc.attrs[attr]

         # for attr, value in attr_list.items():
@@ -132,7 +140,8 @@ def get_git_sha():
         """
         Returns Git short SHA for the current directory.
""" try: - sha = (subprocess.check_output(["git", "-C", os.path.dirname(__file__), "rev-parse", "--short", "HEAD"]).strip().decode()) + sha = (subprocess.check_output(["git", "-C", os.path.dirname(__file__), "rev-parse", + "--short", "HEAD"]).strip().decode()) except subprocess.CalledProcessError: sha = "NOT-A-GIT-REPOSITORY" return sha @@ -143,3 +152,7 @@ def write_to_file(text, file): Writes text to a file, surrounding text with \n characters """ file.write("\n{}\n".format(text)) + + + + diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 91a8293572..638739e82c 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -1,9 +1,9 @@ import logging +import os import numpy as np -import xarray as xr -from ctsm.site_and_regional.base_case import BaseCase +from ctsm.site_and_regional.base_case import BaseCase, USRDAT_DIR logger = logging.getLogger(__name__) @@ -75,12 +75,6 @@ def __init__( self.reg_name = reg_name self.output_dir = output_dir self.tag = None - self.fluse_out = None - self.fluse_in = None - self.fsurf_out = None - self.fsurf_in = None - self.fdomain_out = None - self.fdomain_in = None def create_tag(self): if self.reg_name: @@ -88,11 +82,18 @@ def create_tag(self): else: self.tag = "{}-{}_{}-{}".format(str(self.lon1), str(self.lon2), str(self.lat1), str(self.lat2)) - def create_domain_at_reg(self): - #logging.debug ("Creating domain file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + def create_domain_at_reg(self, indir, file): + + # specify files + fdomain_in = os.path.join(indir, file) + fdomain_out = os.path.join(self.out_dir, "domain.lnd.fv1.9x2.5_gx1v7." + + self.tag + "_170518.nc") + logging.info("fdomain_in: %s", fdomain_in) + logging.info("fdomain_out: %s", fdomain_out) logging.info("Creating domain file at region:"+ self.tag) + # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") + f2 = self.create_1d_coord(fdomain_in, "xc", "yc", "ni", "nj") lat = f2["lat"] lon = f2["lon"] # subset longitude and latitude arrays @@ -102,20 +103,28 @@ def create_domain_at_reg(self): # update attributes self.update_metadata(f3) - f3.attrs["Created_from"] = self.fdomain_in + f3.attrs["Created_from"] = fdomain_in - wfile = self.fdomain_out # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode="w") - logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) + f3.to_netcdf(path=fdomain_out, mode="w") + logging.info("Successfully created file (fdomain_out)" + fdomain_out) f2.close() f3.close() - def create_surfdata_at_reg(self): - #logging.debug ("Creating surface dataset file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + def create_surfdata_at_reg(self, indir, file, user_mods_dir): + logging.info("Creating surface dataset file at region:"+ self.tag) + + # specify files + fsurf_in = os.path.join(indir, file) + fsurf_out = os.path.join(self.out_dir, + "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" + self.tag + + "_c170824.nc") + logging.info("fsurf_in: %s", fsurf_in) + logging.info("fsurf_out: %s", fsurf_out) + # create 1d coordinate variables to enable sel() method - filename = self.fsurf_in + filename = fsurf_in f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") lat = f2["lat"] lon = f2["lon"] @@ -126,20 +135,35 @@ def 
create_surfdata_at_reg(self): # update attributes self.update_metadata(f3) - f3.attrs["Created_from"] = self.fsurf_in + f3.attrs["Created_from"] = fsurf_in # mode 'w' overwrites file - f3.to_netcdf(path=self.fsurf_out, mode="w") - logging.info("created file (fsurf_out)" + self.fsurf_out) + f3.to_netcdf(path=fsurf_out, mode="w") + logging.info("created file (fsurf_out)" + fsurf_out) # f1.close(); f2.close() f3.close() - def create_landuse_at_reg(self): - #logging.debug ("Creating landuse file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + # write to user_nl_clm if specified + if self.create_user_mods: + with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm: + line = "fsurdat = '${}'".format(os.path.join(USRDAT_DIR, fsurf_out)) + self.write_to_file(line, nl_clm) + + def create_landuse_at_reg(self, indir, file, user_mods_dir): logging.info("Creating landuse file at region:"+ self.tag) + + # specify files + fluse_in = os.path.join(indir, file) + fluse_out = os.path.join(self.out_dir, + "landuse.timeseries_1.9x2" + ".5_hist_78pfts_CMIP6_simyr1850-2015_" + + self.tag + ".c170824.nc") + logging.info("fluse_in: %s", fluse_in) + logging.info("fluse_out: %s", fluse_out) + # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + f2 = self.create_1d_coord(fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") lat = f2["lat"] lon = f2["lon"] # subset longitude and latitude arrays @@ -149,11 +173,16 @@ def create_landuse_at_reg(self): # update attributes self.update_metadata(f3) - f3.attrs["Created_from"] = self.fluse_in + f3.attrs["Created_from"] = fluse_in - wfile = self.fluse_out # mode 'w' overwrites file - f3.to_netcdf(path=wfile, mode="w") - logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) + f3.to_netcdf(path=fluse_out, mode="w") + logging.info("Successfully created file (fluse_out)" + fluse_out) f2.close() f3.close() + + # write to user_nl_clm data if specified + if self.create_user_mods: + with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm: + line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, fluse_out)) + self.write_to_file(line, nl_clm) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index f574984952..833a5e6142 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -9,91 +9,92 @@ logger = logging.getLogger(__name__) + class SinglePointCase(BaseCase): """ - A case to encapsulate single point cases. - ... 
- Attributes - ---------- - plat : float - latitude - plon : float - longitude - site_name: str -- default = None - Site name - overwrite_single_pft : bool - flag to overwrite surface data with one uniform plant functional type - dominant_pft: int - index of plant functional type to set to 100% cover if overwrite_single_pft = True - zero_nonveg_landunits : bool - flag to set surface data to all natural vegetation (100% NATVEG, 0% other) - uniform_snowpack - flag to set the the surface data STD_ELEV to 0.0 - saturation_excess : bool - flag to set the surface data FMAX to 0.0 - output_dir : str - main output directory to write subset files to - tag : str - ending tag for output file naming - fdomain_in : str - file name of input domain file to subset - fdomain_out : str - file name of output subset domain domain file - fluse_in : str - file name of input land use file to subset - fluse_out : str - file name of output subset land use file - fsurf_in : str - file name of input surface data file to subset - fsurf_out : str - file name of output subset surface data file - fdatmdomain_in : str - file name of input DATM domain file to subset - fdatmdomain_out : str - file name of output subset DATM domain file - datm_syr : int - starting year for subset DATM data - datm_eyr : int - ending year for subset DATM data - dir_tpqw : str - input directory for TPQW DATM data - dir_prec : str - input directory for precipitation DATM data - dir_solar : str - input directory for solar DATM data - tag_tpqw : str - tag (file naming convention) for input TPQW DATM data - tag_prec : str - tag (file naming convention) for input precipitation DATM data - tag_solar : str - tag (file naming convention) for input solar DATM data - name_tpqw : str - stream name for TPQW DATM data - name_prec : str - stream name for precipitation DATM data - name_solar : str - stream name for solar DATM data - dir_output_datm : str - directory to write subset DATM data to (default to within main output directory) - datm_stream_file : str - file name of usr_nl_datm_streams file to write to for user_mods creation - - Methods - ------- - create_tag: - create a tag for single point which is the site name - or the "lon-lat" format if the site name does not exist. - create_fileout_name: - creates a file name from a basename and a specified tag - create_domain_at_point: - Create domain file at a single point. - create_landuse_at_point: - Create landuse file at a single point. - create_surfdata_at_point: - Create surface dataset at a single point. - create_datmdomain_at_point: - Create DATM domain file at a single point. - """ + A case to encapsulate single point cases. + ... 
+ Attributes + ---------- + plat : float + latitude + plon : float + longitude + site_name: str -- default = None + Site name + overwrite_single_pft : bool + flag to overwrite surface data with one uniform plant functional type + dominant_pft: int + index of plant functional type to set to 100% cover if overwrite_single_pft = True + zero_nonveg_landunits : bool + flag to set surface data to all natural vegetation (100% NATVEG, 0% other) + uniform_snowpack + flag to set the the surface data STD_ELEV to 0.0 + saturation_excess : bool + flag to set the surface data FMAX to 0.0 + output_dir : str + main output directory to write subset files to + tag : str + ending tag for output file naming + fdomain_in : str + file name of input domain file to subset + fdomain_out : str + file name of output subset domain domain file + fluse_in : str + file name of input land use file to subset + fluse_out : str + file name of output subset land use file + fsurf_in : str + file name of input surface data file to subset + fsurf_out : str + file name of output subset surface data file + fdatmdomain_in : str + file name of input DATM domain file to subset + fdatmdomain_out : str + file name of output subset DATM domain file + datm_syr : int + starting year for subset DATM data + datm_eyr : int + ending year for subset DATM data + dir_tpqw : str + input directory for TPQW DATM data + dir_prec : str + input directory for precipitation DATM data + dir_solar : str + input directory for solar DATM data + tag_tpqw : str + tag (file naming convention) for input TPQW DATM data + tag_prec : str + tag (file naming convention) for input precipitation DATM data + tag_solar : str + tag (file naming convention) for input solar DATM data + name_tpqw : str + stream name for TPQW DATM data + name_prec : str + stream name for precipitation DATM data + name_solar : str + stream name for solar DATM data + dir_output_datm : str + directory to write subset DATM data to (default to within main output directory) + datm_stream_file : str + file name of usr_nl_datm_streams file to write to for user_mods creation + + Methods + ------- + create_tag: + create a tag for single point which is the site name + or the "lon-lat" format if the site name does not exist. + create_fileout_name: + creates a file name from a basename and a specified tag + create_domain_at_point: + Create domain file at a single point. + create_landuse_at_point: + Create landuse file at a single point. + create_surfdata_at_point: + Create surface dataset at a single point. + create_datmdomain_at_point: + Create DATM domain file at a single point. 
+ """ def __init__( self, @@ -112,7 +113,8 @@ def __init__( saturation_excess, output_dir, ): - super().__init__(create_domain, create_surfdata, create_landuse, create_datm, create_user_mods) + super().__init__(create_domain, create_surfdata, create_landuse, create_datm, + create_user_mods) self.plat = plat self.plon = plon self.site_name = site_name @@ -123,27 +125,6 @@ def __init__( self.saturation_excess = saturation_excess self.output_dir = output_dir self.tag = None - self.fdomain_in = None - self.fdomain_out = None - self.fluse_in = None - self.fluse_out = None - self.fsurf_in = None - self.fsurf_out = None - self.fdatmdomain_in = None - self.fdatmdomain_out = None - self.datm_syr = None - self.datm_eyr = None - self.name_tpqw = None - self.name_prec = None - self.name_solar = None - self.dir_output_datm = None - self.dir_tpqw = None - self.tag_tpqw = None - self.dir_prec = None - self.tag_prec = None - self.dir_input_datm = None - self.tag_solar = None - self.dir_solar = None self.datm_streams_file = None def create_tag(self): @@ -163,11 +144,20 @@ def create_fileout_name(filename, tag): return new_string - def create_domain_at_point(self): + def create_domain_at_point(self, indir, file): logging.info("----------------------------------------------------------------------") - logging.info("Creating domain file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + logging.info( + "Creating domain file at " + self.plon.__str__() + " " + self.plat.__str__() + ".") + + # specify files + fdomain_in = os.path.join(indir, file) + fdomain_out = self.add_tag_to_filename(fdomain_in, self.tag) + logging.info("fdomain_in: %s", fdomain_in) + logging.info("fdomain_out: %s", os.path.join(self.output_dir, fdomain_out)) + # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") + f2 = self.create_1d_coord(fdomain_in, "xc", "yc", "ni", "nj") + # extract gridcell closest to plon/plat f3 = f2.sel(ni=self.plon, nj=self.plat, method="nearest") # expand dimensions @@ -175,19 +165,27 @@ def create_domain_at_point(self): # update attributes self.update_metadata(f3) - f3.attrs["Created_from"] = self.fdomain_in + f3.attrs["Created_from"] = fdomain_in - wfile = os.path.join(self.output_dir, self.fdomain_out) + wfile = os.path.join(self.output_dir, fdomain_out) f3.to_netcdf(path=wfile, mode="w") logging.info("Successfully created file (fdomain_out) at" + wfile) f2.close() f3.close() - def create_landuse_at_point(self): + def create_landuse_at_point(self, indir, file, user_mods_dir): logging.info("----------------------------------------------------------------------") - logging.info("Creating landuse file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + logging.info( + "Creating landuse file at " + self.plon.__str__() + " " + self.plat.__str__() + ".") + + # specify files + fluse_in = os.path.join(indir, file) + fluse_out = self.create_fileout_name(fluse_in, self.tag) + logging.info("fluse_in: %s", fluse_in) + logging.info("fluse_out: %s", os.path.join(self.output_dir, fluse_out)) + # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + f2 = self.create_1d_coord(fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat f3 = f2.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") @@ -207,20 +205,34 @@ def create_landuse_at_point(self): # update attributes self.update_metadata(f3) - f3.attrs["Created_from"] = 
self.fluse_in + f3.attrs["Created_from"] = fluse_in - wfile = os.path.join(self.output_dir, self.fluse_out) + wfile = os.path.join(self.output_dir, fluse_out) # mode 'w' overwrites file f3.to_netcdf(path=wfile, mode="w") logging.info("Successfully created file (fluse_out) at " + wfile) f2.close() f3.close() - def create_surfdata_at_point(self): + # write to user_nl_clm data if specified + if self.create_user_mods: + with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm: + line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, fluse_out)) + self.write_to_file(line, nl_clm) + + def create_surfdata_at_point(self, indir, file, user_mods_dir): logging.info("----------------------------------------------------------------------") - logging.info("Creating surface dataset file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + logging.info( + "Creating surface dataset file at " + self.plon.__str__() + " " + self.plat.__str__() + ".") + + # specify file + fsurf_in = os.path.join(indir, file) + fsurf_out = self.create_fileout_name(fsurf_in, self.tag) + logging.info("fsurf_in: %s", fsurf_in) + logging.info("fsurf_out: %s", os.path.join(self.output_dir, fsurf_out)) + # create 1d coordinate variables to enable sel() method - filename = os.path.join(self.output_dir, self.fsurf_in) + filename = os.path.join(self.output_dir, fsurf_in) f2 = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat f3 = f2.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") @@ -268,27 +280,43 @@ def create_surfdata_at_point(self): # update attributes self.update_metadata(f3) - f3.attrs["Created_from"] = self.fsurf_in + f3.attrs["Created_from"] = fsurf_in del f3.attrs["History_Log"] # mode 'w' overwrites file - f3.to_netcdf(path=os.path.join(self.output_dir, self.fsurf_out), mode="w") - logging.info("Successfully created file (fsurf_out) at " + os.path.join(self.output_dir, self.fsurf_out)) + f3.to_netcdf(path=os.path.join(self.output_dir, fsurf_out), mode="w") + logging.info("Successfully created file (fsurf_out) at " + os.path.join(self.output_dir, + fsurf_out)) f2.close() f3.close() - def create_datmdomain_at_point(self): + # write to user_nl_clm if specified + if self.create_user_mods: + with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm: + line = "fsurdat = '${}'".format(os.path.join(USRDAT_DIR, fsurf_out)) + self.write_to_file(line, nl_clm) + + def create_datmdomain_at_point(self, indir, file, dir_output_datm): logging.info("----------------------------------------------------------------------") - logging.info("Creating DATM domain file at "+ self.plon.__str__()+" "+ self.plat.__str__()+".") + logging.info( + "Creating DATM domain file at " + self.plon.__str__() + " " + self.plat.__str__() + ".") + + # specify files + fdatmdomain_in = os.path.join(indir, file) + datm_file = self.add_tag_to_filename(fdatmdomain_in, self.tag) + fdatmdomain_out = os.path.join(dir_output_datm, datm_file) + logging.info("fdatmdomain_in: %s", fdatmdomain_in) + logging.info("fdatmdomain out: %s", os.path.join(self.output_dir, fdatmdomain_out)) + # create 1d coordinate variables to enable sel() method - f2 = self.create_1d_coord(self.fdatmdomain_in, "xc", "yc", "ni", "nj") + f2 = self.create_1d_coord(fdatmdomain_in, "xc", "yc", "ni", "nj") # extract gridcell closest to plon/plat f3 = f2.sel(ni=self.plon, nj=self.plat, method="nearest") # expand dimensions f3 = f3.expand_dims(["nj", "ni"]) - wfile = os.path.join(self.output_dir, 
self.fdatmdomain_out)
+        wfile = os.path.join(self.output_dir, fdatmdomain_out)

         # update attributes
         self.update_metadata(f3)
-        f3.attrs["Created_from"] = self.fdatmdomain_in
+        f3.attrs["Created_from"] = fdatmdomain_in

         # mode 'w' overwrites file
         f3.to_netcdf(path=wfile, mode="w")
         logging.info("Successfully created file (fdatmdomain_out) at " + wfile)
@@ -317,17 +345,14 @@ def extract_datm_at(self, file_in, file_out):
     def write_shell_commands(self, file):
         """
         writes out xml commands to a file (i.e. shell_commands) for single-point runs
-
-        file - file connection to shell_commands file
         """
-        # write_to_file surrounds text with newlines
-        self.write_to_file("! Change below line if you move the subset data directory", file)
-        self.write_to_file("./xmlchange {}={}".format(USRDAT_DIR, self.output_dir), file)
-        self.write_to_file("./xmlchange PTS_LON={}".format(str(self.plon)), file)
-        self.write_to_file("./xmlchange PTS_LAT={}".format(str(self.plat)), file)
-        self.write_to_file("./xmlchange MPILIB=mpi-serial", file)
-        file.close()
+        with open(file, "w") as shell_file:
+            self.write_to_file("# Change below line if you move the subset data directory", shell_file)
+            self.write_to_file("./xmlchange {}={}".format(USRDAT_DIR, self.output_dir), shell_file)
+            self.write_to_file("./xmlchange PTS_LON={}".format(str(self.plon)), shell_file)
+            self.write_to_file("./xmlchange PTS_LAT={}".format(str(self.plat)), shell_file)
+            self.write_to_file("./xmlchange MPILIB=mpi-serial", shell_file)

     def write_datm_streams_lines(self, streamname, datmfiles, file):
         """
         writes out lines for the user_nl_datm_streams file for a specific DATM stream
         for using subset DATM data at a single point
         """
         self.write_to_file("{}:datafiles={}".format(streamname, ','.join(datmfiles)), file)
         self.write_to_file("{}:mapalgo=none".format(streamname), file)
         self.write_to_file("{}:meshfile=none".format(streamname), file)

-    def create_datm_at_point(self):
+    def create_datm_at_point(self, file_dict, datm_syr, datm_eyr, datm_streams_file):
         logging.info("----------------------------------------------------------------------")
         logging.info(
             "Creating DATM files at " + self.plon.__str__() + " " + self.plat.__str__() + ".")

         # -- create data files
         infile = []
         outfile = []
         solarfiles = []
         precfiles = []
         tpqwfiles = []
-        for y in range(self.datm_syr, self.datm_eyr + 1):
+        for y in range(datm_syr, datm_eyr + 1):
             ystr = str(y)
             for m in range(1, 13):
                 mstr = str(m)
                 if m < 10:
                     mstr = "0" + mstr

                 dtag = ystr + "-" + mstr

-                fsolar = os.path.join(self.dir_input_datm, self.dir_solar, "{}{}.nc".format(self.tag_solar, dtag))
-                fsolar2 = "{}{}.{}.nc".format(self.tag_solar, self.tag, dtag)
-                fprecip = os.path.join(self.dir_input_datm, self.dir_prec, "{}{}.nc".format(self.tag_prec, dtag))
-                fprecip2 = "{}{}.{}.nc".format(self.tag_prec, self.tag, dtag)
-                ftpqw = os.path.join(self.dir_input_datm, self.dir_tpqw, "{}{}.nc".format(self.tag_tpqw, dtag))
+                fsolar = os.path.join(file_dict.datm_indir, file_dict.dir_solar,
+                                      "{}{}.nc".format(file_dict.tag_solar, dtag))
+                fsolar2 = "{}{}.{}.nc".format(file_dict.tag_solar, self.tag, dtag)
+                fprecip = os.path.join(file_dict.datm_indir, file_dict.dir_prec,
+                                       "{}{}.nc".format(file_dict.tag_prec, dtag))
+                fprecip2 = "{}{}.{}.nc".format(file_dict.tag_prec, self.tag, dtag)
+                ftpqw = os.path.join(file_dict.datm_indir, file_dict.dir_tpqw,
+                                     "{}{}.nc".format(file_dict.tag_tpqw, dtag))
+                ftpqw2 =
"{}{}.{}.nc".format(file_dict.tag_tpqw, self.tag, dtag) + + outdir = os.path.join(self.output_dir, file_dict.datm_outdir) infile += [fsolar, fprecip, ftpqw] outfile += [os.path.join(outdir, fsolar2), os.path.join(outdir, fprecip2), os.path.join(outdir, ftpqw2)] - solarfiles.append(os.path.join("${}".format(USRDAT_DIR), self.dir_output_datm, fsolar2)) - precfiles.append(os.path.join("${}".format(USRDAT_DIR), self.dir_output_datm, fprecip2)) - tpqwfiles.append(os.path.join("${}".format(USRDAT_DIR), self.dir_output_datm, ftpqw2)) + solarfiles.append( + os.path.join("${}".format(USRDAT_DIR), file_dict.datm_outdir, fsolar2)) + precfiles.append( + os.path.join("${}".format(USRDAT_DIR), file_dict.datm_outdir, fprecip2)) + tpqwfiles.append( + os.path.join("${}".format(USRDAT_DIR), file_dict.datm_outdir, ftpqw2)) nm = len(infile) for n in range(nm): @@ -384,11 +416,11 @@ def create_datm_at_point(self): file_out = outfile[n] self.extract_datm_at(file_in, file_out) - logging.info("All DATM files are created in: "+ self.dir_output_datm+".") + logging.info("All DATM files are created in: " + file_dict.datm_outdir + ".") # write to user_nl_datm_streams if specified if self.create_user_mods: - with open(self.datm_streams_file, "a") as file: - self.write_datm_streams_lines(self.name_solar, solarfiles, file) - self.write_datm_streams_lines(self.name_prec, precfiles, file) - self.write_datm_streams_lines(self.name_tpqw, tpqwfiles, file) + with open(datm_streams_file, "a") as file: + self.write_datm_streams_lines(file_dict.name_solar, solarfiles, file) + self.write_datm_streams_lines(file_dict.name_prec, precfiles, file) + self.write_datm_streams_lines(file_dict.name_tpqw, tpqwfiles, file) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index f6b5a744cf..5faa3f3560 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -43,15 +43,12 @@ from __future__ import print_function import sys import os -import string import logging import subprocess import argparse import configparser -from datetime import date from getpass import getuser -from logging.handlers import RotatingFileHandler from argparse import ArgumentParser import textwrap @@ -59,18 +56,17 @@ from ctsm.site_and_regional.regional_case import RegionalCase from ctsm.site_and_regional.single_point_case import SinglePointCase from ctsm.path_utils import path_to_ctsm_root - -_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", 'python')) -sys.path.insert(1, _CTSM_PYTHON) - -DEFAULTS_FILE = "default_data.cfg" - from ctsm.ctsm_logging import ( setup_logging_pre_config, add_logging_args, process_logging_args, ) +_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", 'python')) +sys.path.insert(1, _CTSM_PYTHON) + +DEFAULTS_FILE = "default_data.cfg" + logger = logging.getLogger(__name__) @@ -223,7 +219,8 @@ def get_parser(): ) subparser.add_argument( "--create-surface", - help="Flag for creating surface data file at single point/region. [default: %(default)s]", + help="Flag for creating surface data file at single point/region. [default: %(" + "default)s]", action="store", dest="create_surfdata", type=str2bool, @@ -234,7 +231,8 @@ def get_parser(): ) subparser.add_argument( "--create-landuse", - help="Flag for creating landuse data file at single point/region. [default: %(default)s]", + help="Flag for creating landuse data file at single point/region. 
[default: %(" + "default)s]", action="store", dest="create_landuse", type=str2bool, @@ -245,7 +243,8 @@ def get_parser(): ) subparser.add_argument( "--create-datm", - help="Flag for creating DATM forcing data at single point/region. [default: %(default)s]", + help="Flag for creating DATM forcing data at single point/region. [default: %(" + "default)s]", action="store", dest="create_datm", type=str2bool, @@ -267,7 +266,8 @@ def get_parser(): ) subparser.add_argument( "--datm-syr", - help="Start year for creating DATM forcing at single point/region. [default: %(default)s]", + help="Start year for creating DATM forcing at single point/region. [default: %(" + "default)s]", action="store", dest="datm_syr", required=False, @@ -276,7 +276,8 @@ def get_parser(): ) subparser.add_argument( "--datm-eyr", - help="End year for creating DATM forcing at single point/region. [default: %(default)s]", + help="End year for creating DATM forcing at single point/region. " + "[default: %(default)s]", action="store", dest="datm_eyr", required=False, @@ -285,7 +286,8 @@ def get_parser(): ) subparser.add_argument( "--crop", - help="Flag for creating datasets using the extensive list of prognostic crop types. [default: %(default)s]", + help="Flag for creating datasets using the extensive list of prognostic crop types. [" + "default: %(default)s]", action="store", dest="crop_flag", type=str2bool, @@ -352,10 +354,9 @@ def str2bool(v): return v if v.lower() in ("yes", "true", "t", "y", "1"): return True - elif v.lower() in ("no", "false", "f", "n", "0"): + if v.lower() in ("no", "false", "f", "n", "0"): return False - else: - raise argparse.ArgumentTypeError("Boolean value expected. [true or false] or [y or n]") + raise argparse.ArgumentTypeError("Boolean value expected. [true or false] or [y or n]") def plat_type(x): @@ -389,41 +390,211 @@ def plon_type(x): x(float): converted longitude between 0 and 360 """ x = float(x) - if (-180 < x) and (x < 0): - print("lon is: ", x) + if -180 <= x < 0: + logging.info("lon is: %f", x) x = x % 360 - print("after modulo lon is: ", x) - if (x < 0) or (x > 360): - raise argparse.ArgumentTypeError("ERROR: Latitude of single point should be between 0 and 360 or -180 and 180.") + logging.info("after modulo lon is: %f", x) + if x < 0 or x > 360: + raise argparse.ArgumentTypeError("ERROR: Latitude of single point should be between 0 and " + "360 or -180 and 180.") return x -def get_git_sha(): +def setup_user_mods(out_dir, user_mods_dir, cesmroot): """ - Returns Git short SHA for the currect directory. 
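The normalization in plon_type above leans on Python's floor-division modulo, which returns a non-negative result for a positive divisor. A minimal standalone sketch of the same conversion (the helper name normalize_lon is illustrative, not part of the patch):

    def normalize_lon(lon):
        """Map a longitude given in [-180, 180) onto [0, 360)."""
        if -180 <= lon < 0:
            lon = lon % 360   # e.g. -105.0 % 360 == 255.0 in Python
        return lon

    assert normalize_lon(-105.0) == 255.0
    assert normalize_lon(255.0) == 255.0
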
+ Sets up the user mods files and directories """ - try: + if user_mods_dir == "": + user_mods_dir = os.path.join(out_dir, "user_mods") + if not os.path.isdir(user_mods_dir): + os.mkdir(user_mods_dir) + + nl_clm_base = os.path.join(cesmroot, "cime_config/user_nl_clm") + nl_clm = os.path.join(user_mods_dir, "user_nl_clm") + with open(nl_clm_base, "r") as basefile, open(nl_clm, "w") as user_file: + for line in basefile: + user_file.write(line) + + nl_datm_base = os.path.join(cesmroot, "components/cdeps/datm/cime_config" + "/user_nl_datm_streams") + nl_datm = os.path.join(user_mods_dir, "user_nl_datm_streams") + with open(nl_datm_base, "r") as base_file, open(nl_datm, 'w') as user_file: + for line in base_file: + user_file.write(line) + + +def setup_files(args, defaults): + """ + Sets up the files and folders needed for this program + """ + if not os.path.isdir(args.out_dir): + os.mkdir(args.out_dir) - # os.abspath(__file__) - sha = ( - subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) - .strip() - .decode() - ) - except subprocess.CalledProcessError: - sha = "NOT-A-GIT-REPOSITORY" - return sha + if args.create_user_mods: + setup_user_mods(args.out_dir, args.user_mods_dir) + + # DATM data + dir_output_datm = "datmdata" + dir_input_datm = defaults.get("datm_gswp3", "dir") + if args.create_datm: + if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)): + os.mkdir(os.path.join(args.out_dir, dir_output_datm)) + logging.info("dir_input_datm : %s", dir_input_datm) + logging.info("dir_output_datm: %s", os.path.join(args.out_dir, dir_output_datm)) + + # if the crop flag is on - we need to use a different land use and surface data file + if args.crop_flag: + num_pft = "78" + fsurf_in = defaults.get("surfdat", "surfdat_78pft") + fluse_in = defaults.get("landuse", "landuse_78pft") + else: + num_pft = "16" + fsurf_in = defaults.get("surfdat", "surfdat_16pft") + fluse_in = defaults.get("landuse", "landuse_16pft") + logging.debug("crop_flag = {} => num_pft = {}".format(args.crop_flag.__str__(), num_pft)) + + datm_type = 'datm_gswp3' + file_dict = {'main_dir': defaults.get("main", "clmforcingindir"), + 'fdomain_in': defaults.get("domain", "file"), + 'fsurf_dir': os.path.join(defaults.get("main", "clmforcingindir"), + os.path.join(defaults.get("surfdat", "dir"))), + 'fluse_dir': os.path.join(defaults.get("main", "clmforcingindir"), + os.path.join(defaults.get("landuse", "dir"))), + 'fsurf_in': fsurf_in, + 'fluse_in': fluse_in, + 'fdatmdomain_in': defaults.get(datm_type, "domain"), + 'datm_indir': dir_input_datm, + 'datm_outdir': dir_output_datm, + 'dir_solar': defaults.get(datm_type, 'solardir'), + 'dir_prec': defaults.get(datm_type, 'precdir'), + 'dir_tpqw': defaults.get(datm_type, 'tpqwdir'), + 'tag_solar': defaults.get(datm_type, 'solartag'), + 'tag_prec': defaults.get(datm_type, 'prec_tag'), + 'tag_tpqw': defaults.get(datm_type, 'tpqwtag'), + 'name_solar': defaults.get(datm_type, 'solarname'), + 'name_prec': defaults.get(datm_type, 'precname'), + 'name_tpqw': defaults.get(datm_type, 'tpqwname') + } + + return file_dict + + +def subset_point(args, file_dict): + """ + Subsets surface, domain, land use, and/or DATM files at a single point + """ + + logging.info("----------------------------------------------------------------------------") + logging.info("This script extracts a single point from the global CTSM datasets.") + + # -- Create SinglePoint Object + single_point = SinglePointCase( + args.plat, + args.plon, + args.site_name, + args.create_domain, + args.create_surfdata, + 
args.create_landuse, + args.create_datm, + args.create_user_mods, + args.overwrite_single_pft, + args.dom_pft, + args.zero_nonveg, + args.uni_snow, + args.saturation_excess, + args.out_dir, + ) + + single_point.create_tag() + logging.debug(single_point) + + # -- Create CTSM domain file + if single_point.create_domain: + single_point.create_domain_at_point(file_dict.main_dir, file_dict.fdomain_in) + + # -- Create CTSM surface data file + if single_point.create_surfdata: + single_point.create_surfdata_at_point(file_dict.fsurf_dir, file_dict.fsurf_in, + args.user_mods_dir) + + # -- Create CTSM transient landuse data file + if single_point.create_landuse: + single_point.create_landuse_at_point(file_dict.fluse_dir, file_dict.fluse_in, + args.user_mods_dir) + + # -- Create single point atmospheric forcing data + if single_point.create_datm: + + # subset DATM domain file + single_point.create_datmdomain_at_point(file_dict.datm_indir, file_dict.fdatmdomain_in, + file_dict.datm_outdir) + + # subset the DATM data + nl_datm = os.path.join(args.user_mods_dir, "user_nl_datm_streams") + single_point.create_datm_at_point(file_dict, args.datm_syr, args.datm_eyr, + nl_datm) + + # -- Write shell commands + if single_point.create_user_mods: + single_point.write_shell_commands(os.path.join(args.user_mods_dir, "shell_commands")) + + logging.info("Successfully ran script for single point.") + + +def subset_region(args, file_dict): + """ + Subsets surface, domain, land use, and/or DATM files for a region + """ + + logging.info("----------------------------------------------------------------------------") + logging.info("This script extracts a region from the global CTSM datasets.") + + # -- Create Region Object + region = RegionalCase( + args.lat1, + args.lat2, + args.lon1, + args.lon2, + args.reg_name, + args.create_domain, + args.create_surfdata, + args.create_landuse, + args.create_datm, + args.create_user_mods, + args.out_dir, + ) + + region.create_tag() + logging.debug(region) + + # -- Create CTSM domain file + if region.create_domain: + region.create_domain_at_reg(file_dict.main_dir, file_dict.fdomain_in) + + # -- Create CTSM surface data file + if region.create_surfdata: + region.create_surfdata_at_reg(file_dict.fsurf_dir, file_dict.fsurf_in, + args.user_mods_dir) + + # -- Create CTSM transient landuse data file + if region.create_landuse: + region.create_landuse_at_reg(file_dict.fluse_dir, file_dict.fluse_in, + args.user_mods_dir) + + logging.info("Successfully ran script for a regional case.") + sys.exit() def main(): + """ + Calls functions that subset surface, landuse, domain, and/or DATM files. 
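Before dispatching to subset_point or subset_region, main() pulls input-data locations from default_data.cfg through configparser. A sketch of that lookup pattern, using section and option names that appear elsewhere in this patch:

    import configparser

    defaults = configparser.ConfigParser()
    defaults.read("default_data.cfg")                    # DEFAULTS_FILE above
    fsurf_in = defaults.get("surfdat", "surfdat_78pft")  # (section, option)
    dir_input_datm = defaults.get("datm_gswp3", "dir")
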
+ """ - # -- add logging flags from ctsm_logging + # add logging flags from ctsm_logging setup_logging_pre_config() parser = get_parser() add_logging_args(parser) - args = parser.parse_args() - process_logging_args(args) # parse defaults file @@ -433,266 +604,22 @@ def main(): # --------------------------------- # - today = date.today() - today_string = today.strftime("%Y%m%d") - myname = getuser() - pwd = os.getcwd() - - # log_file = os.path.join(pwd, today_string + ".log") - - # log_level = logging.DEBUG - # setup_logging(log_file, log_level) - # log = logging.getLogger(__name__) - - logging.info("User = " + myname) - logging.info("Current directory = " + pwd) + logging.info("User = %s", myname) + logging.info("Current directory = %s", pwd) # --------------------------------- # # print help and exit when no option is chosen - if (args.run_type != "point" and args.run_type != "reg"): + if args.run_type != "point" and args.run_type != "reg": get_parser().print_help() - quit() + sys.exit() - # if the crop flag is on - we need to use a different landuse and surface - # data file - if args.crop_flag: - num_pft = "78" - fsurf_in = defaults.get("surfdat", "surfdat_78pft") - fluse_in = defaults.get("landuse", "landuse_78pft") - else: - num_pft = "16" - fsurf_in = defaults.get("surfdat", "surfdat_16pft") - fluse_in = defaults.get("landuse", "landuse_16pft") - - logging.debug("crop_flag = {} => num_pft = {}".format(args.crop_flag.__str__(), num_pft)) - - # -- Specify input and output directories and files - - # Top-level output directory - if not os.path.isdir(args.out_dir): - os.mkdir(args.out_dir) - - # DATM data - dir_output_datm = "datmdata" - dir_input_datm = defaults.get("datm_gswp3", "dir") - if args.create_datm: - if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)): - os.mkdir(os.path.join(args.out_dir, dir_output_datm)) - logging.info("dir_input_datm : ", dir_input_datm) - logging.info("dir_output_datm: ", os.path.join(args.out_dir, dir_output_datm)) - - # -- Set up user mods directories and base files - if args.create_user_mods: - if args.user_mods_dir == "": - args.user_mods_dir = os.path.join(args.out_dir, "user_mods") - if not os.path.isdir(args.user_mods_dir): - os.mkdir(args.user_mods_dir) - - - - # -- Create empty user_nl_clm file - if args.create_surfdata or args.create_landuse: - nl_clm_base = os.path.join(cesmroot, "cime_config/user_nl_clm") - nl_clm = os.path.join(args.user_mods_dir, "user_nl_clm") - with open(nl_clm_base, "r") as basefile, open(nl_clm, "w") as user_file: - for line in basefile: - user_file.write(line) - - # -- Create empty user_nl_datm_streams file - if args.create_datm: - nl_datm_base = os.path.join(cesmroot, "components/cdeps/datm/cime_config/user_nl_datm_streams") - nl_datm = os.path.join(args.user_mods_dir, "user_nl_datm_streams") - with open(nl_datm_base, "r") as base_file, open(nl_datm, 'w') as user_file: - for line in base_file: - user_file.write(line) - - # Default files - dir_inputdata = defaults.get("main", "clmforcingindir") - dir_inputsurf = defaults.get("surfdat", "dir") - dir_inputluse = defaults.get("landuse", "dir") - fdomain_in = os.path.join(dir_inputdata, defaults.get("domain", "file")) - fdatmdomain_in = os.path.join(dir_input_datm, defaults.get("datm_gswp3", "domain")) - datm_solardir = defaults.get("datm_gswp3", "solardir") - datm_precdir = defaults.get("datm_gswp3", "precdir") - datm_tpqwdir = defaults.get("datm_gswp3", "tpqwdir") - datm_solartag = defaults.get("datm_gswp3", "solartag") - datm_prectag = 
defaults.get("datm_gswp3", "prectag") - datm_tpqwtag = defaults.get("datm_gswp3", "tpqwtag") - datm_solarname = defaults.get("datm_gswp3", "solarname") - datm_precname = defaults.get("datm_gswp3", "precname") - datm_tpqwname = defaults.get("datm_gswp3", "tpqwname") + # create files and folders necessary and return dictionary of file/folder locations + file_dict = setup_files(args, defaults) if args.run_type == "point": - logging.info("----------------------------------------------------------------------------") - logging.info("This script extracts a single point from the global CTSM datasets.") - - # -- Create SinglePoint Object - single_point = SinglePointCase( - args.plat, - args.plon, - args.site_name, - args.create_domain, - args.create_surfdata, - args.create_landuse, - args.create_datm, - args.create_user_mods, - args.overwrite_single_pft, - args.dom_pft, - args.zero_nonveg, - args.uni_snow, - args.saturation_excess, - args.out_dir, - ) - single_point.create_tag() - - if single_point.create_user_mods and single_point.create_datm: - single_point.datm_streams_file = nl_datm - - logging.debug(single_point) - - # -- Create CTSM domain file - if single_point.create_domain: - # -- Specify land domain file --------------------------------- - single_point.fdomain_in = os.path.join(dir_inputdata, fdomain_in) - single_point.fdomain_out = single_point.add_tag_to_filename(fdomain_in, single_point.tag) - logging.info("fdomain_in: ", single_point.fdomain_in) - logging.info("fdomain_out: ", os.path.join(single_point.output_dir, single_point.fdomain_out)) - single_point.create_domain_at_point() - - # -- Create CTSM surface data file - if single_point.create_surfdata: - # -- Specify surface file --------------------------------- - single_point.fsurf_in = os.path.join(dir_inputdata, dir_inputsurf, fsurf_in) - single_point.fsurf_out = single_point.create_fileout_name(fsurf_in, single_point.tag) - logging.info("fsurf_in: ", single_point.fsurf_in) - logging.info("fsurf_out: ", os.path.join(single_point.output_dir, single_point.fsurf_out)) - single_point.create_surfdata_at_point() - - # write to user_nl_clm if specified - if args.create_user_mods: - nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a") - line = "fsurdat = '${}'".format(os.path.join(USRDAT_DIR, single_point.fsurf_out)) - single_point.write_to_file(line, nl_clm) - nl_clm.close() - - # -- Create CTSM transient landuse data file - if single_point.create_landuse: - # -- Specify surface file --------------------------------- - single_point.fluse_in = os.path.join(dir_inputdata, dir_inputluse, fluse_in) - single_point.fluse_out = single_point.create_fileout_name(fluse_in, single_point.tag) - logging.info("fluse_in: ", single_point.fluse_in) - logging.info("fluse_out: ", os.path.join(single_point.output_dir, single_point.fluse_out)) - single_point.create_landuse_at_point() - - # write to user_nl_clm data if specified - if single_point.create_user_mods: - nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a") - line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, single_point.fluse_out)) - single_point.write_to_file(line, nl_clm) - nl_clm.close() - - # -- Create single point atmospheric forcing data - if single_point.create_datm: - # -- Specify datm and subset domain file --------------------------------- - single_point.fdatmdomain_in = os.path.join(dir_input_datm, fdatmdomain_in) - datm_file = single_point.add_tag_to_filename(single_point.fdatmdomain_in, single_point.tag) - single_point.fdatmdomain_out = 
os.path.join(dir_output_datm, datm_file) - logging.info("fdatmdomain_in: ", single_point.fdatmdomain_in) - logging.info("fdatmdomain out: ", os.path.join(single_point.output_dir, single_point.fdatmdomain_out)) - single_point.create_datmdomain_at_point() - - # -- Specify DATM directories, tags, and stream names - single_point.datm_syr = args.datm_syr - single_point.datm_eyr = args.datm_eyr - single_point.dir_input_datm = dir_input_datm - single_point.dir_output_datm = dir_output_datm - single_point.dir_solar = datm_solardir - single_point.dir_prec = datm_precdir - single_point.dir_tpqw = datm_tpqwdir - single_point.tag_solar = datm_solartag - single_point.tag_prec = datm_prectag - single_point.tag_tpqw = datm_tpqwtag - single_point.name_solar = datm_solarname - single_point.name_prec = datm_precname - single_point.name_tpqw = datm_tpqwname - single_point.create_datm_at_point() - - # -- Write shell commands - if single_point.create_user_mods: - shell_commands_file = open(os.path.join(args.user_mods_dir, "shell_commands"), "w") - single_point.write_shell_commands(shell_commands_file) - - logging.info("Successfully ran script for single point.") - exit() - + subset_point(args, file_dict) elif args.run_type == "reg": - logging.info("----------------------------------------------------------------------------") - logging.info("This script extracts a region from the global CTSM datasets.") - - # -- Create Region Object - region = RegionalCase( - args.lat1, - args.lat2, - args.lon1, - args.lon2, - args.reg_name, - args.create_domain, - args.create_surfdata, - args.create_landuse, - args.create_datm, - args.create_user_mods, - args.out_dir, - ) - region.create_tag() - - logging.debug(region) - - # -- Create CTSM domain file - if region.create_domain: - # -- Specify land domain file --------------------------------- - region.fdomain_in = os.path.join(dir_inputdata, fdomain_in) - region.fdomain_out = os.path.join(args.out_dir, "domain.lnd.fv1.9x2.5_gx1v7." 
+ region.tag + "_170518.nc") - logging.info("fdomain_in: ", region.fdomain_in) - logging.info("fdomain_out: ", region.fdomain_out) - region.create_domain_at_reg() - - # -- Create CTSM surface data file - if region.create_surfdata: - # -- Specify surface file --------------------------------- - region.fsurf_in = os.path.join(dir_inputdata, dir_inputsurf, fsurf_in) - region.fsurf_out = os.path.join(args.out_dir, "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" + region.tag - + "_c170824.nc") - logging.info("fsurf_in: ", region.fsurf_in) - logging.info("fsurf_out: ", region.fsurf_out) - region.create_surfdata_at_reg() - - # write to user_nl_clm if specified - if args.create_user_mods: - nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a") - line = "fsurdat = '${}'".format(os.path.join(USRDAT_DIR, region.fsurf_out)) - region.write_to_file(line, nl_clm) - nl_clm.close() - - # -- Create CTSM transient landuse data file - if region.create_landuse: - # -- Specify surface file --------------------------------- - region.fluse_in = os.path.join(dir_inputdata, dir_inputluse, fluse_in) - region.fluse_out = os.path.join(args.out_dir, - "landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_" + - region.tag + ".c170824.nc") - logging.info("fluse_in: ", region.fluse_in) - logging.info("fluse_out: ", region.fluse_out) - region.create_landuse_at_reg() - - # write to user_nl_clm data if specified - if region.create_user_mods: - nl_clm = open(os.path.join(args.user_mods_dir, "user_nl_clm"), "a") - line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, region.fluse_out)) - region.write_to_file(line, nl_clm) - nl_clm.close() - - logging.info("Successfully ran script for a regional case.") - exit() + subset_region(args, file_dict) From c5613f44bd6f98c8a6fd71499a6e614073d484dd Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 10:50:41 -0700 Subject: [PATCH 043/223] updates according to pylint --- python/ctsm/site_and_regional/base_case.py | 16 ++--- .../ctsm/site_and_regional/regional_case.py | 39 +++++----- .../site_and_regional/single_point_case.py | 72 +++++-------------- python/ctsm/subset_data.py | 8 +-- 4 files changed, 40 insertions(+), 95 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index bada3a5509..2b7cb222b1 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -4,13 +4,12 @@ import os import subprocess import logging +from datetime import date +from getpass import getuser import numpy as np import xarray as xr -from datetime import date -from getpass import getuser - myname = getuser() USRDAT_DIR = "CLM_USRDAT_DIR" @@ -64,7 +63,7 @@ def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): x_dim: dimension name in X -- lon y_dim: dimension name in Y -- lat """ - logging.debug("Open file: " + filename) + logging.debug("Open file: %s", filename) f1 = xr.open_dataset(filename) # create 1d coordinate variables to enable sel() method @@ -128,12 +127,9 @@ def update_metadata(self, nc): for attr in del_attrs: if attr in attr_list: - logging.debug("This attr should be deleted : " + attr) + logging.debug("This attr should be deleted : %s", attr) del nc.attrs[attr] - # for attr, value in attr_list.items(): - # print (attr + " = "+str(value)) - @staticmethod def get_git_sha(): """ @@ -152,7 +148,3 @@ def write_to_file(text, file): Writes text to a file, surrounding text with \n characters """ file.write("\n{}\n".format(text)) - - - - diff 
--git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 638739e82c..41a0d96dba 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -1,3 +1,6 @@ +""" +Holds the class RegionalCase +""" import logging import os @@ -25,18 +28,6 @@ class RegionalCase(BaseCase): region name tag : str ending tag for output file naming - fluse_out : str - file name of output subset land use file - fluse_in : str - file name of input land use file to subset - fsurf_out : str - file name of output subset surface data file - fsurf_in : str - file name of input surface data to subset - fdomain_out : str - file name of output domain subset domain file - fdomain_in : str - file name of input domain file to subset Methods ------- @@ -67,7 +58,8 @@ def __init__( create_user_mods, output_dir, ): - super().__init__(create_domain, create_surfdata, create_landuse, create_datm, create_user_mods) + super().__init__(create_domain, create_surfdata, create_landuse, create_datm, + create_user_mods) self.lat1 = lat1 self.lat2 = lat2 self.lon1 = lon1 @@ -80,17 +72,18 @@ def create_tag(self): if self.reg_name: self.tag = self.reg_name else: - self.tag = "{}-{}_{}-{}".format(str(self.lon1), str(self.lon2), str(self.lat1), str(self.lat2)) + self.tag = "{}-{}_{}-{}".format(str(self.lon1), str(self.lon2), str(self.lat1), + str(self.lat2)) def create_domain_at_reg(self, indir, file): # specify files fdomain_in = os.path.join(indir, file) - fdomain_out = os.path.join(self.out_dir, "domain.lnd.fv1.9x2.5_gx1v7." + + fdomain_out = os.path.join(self.output_dir, "domain.lnd.fv1.9x2.5_gx1v7." + self.tag + "_170518.nc") logging.info("fdomain_in: %s", fdomain_in) logging.info("fdomain_out: %s", fdomain_out) - logging.info("Creating domain file at region:"+ self.tag) + logging.info("Creating domain file at region: %s", self.tag) # create 1d coordinate variables to enable sel() method f2 = self.create_1d_coord(fdomain_in, "xc", "yc", "ni", "nj") @@ -107,17 +100,17 @@ def create_domain_at_reg(self, indir, file): # mode 'w' overwrites file f3.to_netcdf(path=fdomain_out, mode="w") - logging.info("Successfully created file (fdomain_out)" + fdomain_out) + logging.info("Successfully created file (fdomain_out) at %s", fdomain_out) f2.close() f3.close() def create_surfdata_at_reg(self, indir, file, user_mods_dir): - logging.info("Creating surface dataset file at region:"+ self.tag) + logging.info("Creating surface dataset file at region: %s", self.tag) # specify files fsurf_in = os.path.join(indir, file) - fsurf_out = os.path.join(self.out_dir, + fsurf_out = os.path.join(self.output_dir, "surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_" + self.tag + "_c170824.nc") logging.info("fsurf_in: %s", fsurf_in) @@ -139,7 +132,7 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir): # mode 'w' overwrites file f3.to_netcdf(path=fsurf_out, mode="w") - logging.info("created file (fsurf_out)" + fsurf_out) + logging.info("created file (fsurf_out) %s", fsurf_out) # f1.close(); f2.close() f3.close() @@ -151,11 +144,11 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir): self.write_to_file(line, nl_clm) def create_landuse_at_reg(self, indir, file, user_mods_dir): - logging.info("Creating landuse file at region:"+ self.tag) + logging.info("Creating landuse file at region: %s", self.tag) # specify files fluse_in = os.path.join(indir, file) - fluse_out = os.path.join(self.out_dir, + fluse_out = os.path.join(self.output_dir, 
"landuse.timeseries_1.9x2" ".5_hist_78pfts_CMIP6_simyr1850-2015_" + self.tag + ".c170824.nc") @@ -177,7 +170,7 @@ def create_landuse_at_reg(self, indir, file, user_mods_dir): # mode 'w' overwrites file f3.to_netcdf(path=fluse_out, mode="w") - logging.info("Successfully created file (fluse_out)" + fluse_out) + logging.info("Successfully created file (fluse_out) %s", fluse_out) f2.close() f3.close() diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 833a5e6142..2a08a704e3 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -1,3 +1,7 @@ +""" +Holds the class SinglePointCase +""" + import os import logging from datetime import date @@ -36,48 +40,6 @@ class SinglePointCase(BaseCase): main output directory to write subset files to tag : str ending tag for output file naming - fdomain_in : str - file name of input domain file to subset - fdomain_out : str - file name of output subset domain domain file - fluse_in : str - file name of input land use file to subset - fluse_out : str - file name of output subset land use file - fsurf_in : str - file name of input surface data file to subset - fsurf_out : str - file name of output subset surface data file - fdatmdomain_in : str - file name of input DATM domain file to subset - fdatmdomain_out : str - file name of output subset DATM domain file - datm_syr : int - starting year for subset DATM data - datm_eyr : int - ending year for subset DATM data - dir_tpqw : str - input directory for TPQW DATM data - dir_prec : str - input directory for precipitation DATM data - dir_solar : str - input directory for solar DATM data - tag_tpqw : str - tag (file naming convention) for input TPQW DATM data - tag_prec : str - tag (file naming convention) for input precipitation DATM data - tag_solar : str - tag (file naming convention) for input solar DATM data - name_tpqw : str - stream name for TPQW DATM data - name_prec : str - stream name for precipitation DATM data - name_solar : str - stream name for solar DATM data - dir_output_datm : str - directory to write subset DATM data to (default to within main output directory) - datm_stream_file : str - file name of usr_nl_datm_streams file to write to for user_mods creation Methods ------- @@ -125,7 +87,6 @@ def __init__( self.saturation_excess = saturation_excess self.output_dir = output_dir self.tag = None - self.datm_streams_file = None def create_tag(self): if self.site_name: @@ -147,7 +108,7 @@ def create_fileout_name(filename, tag): def create_domain_at_point(self, indir, file): logging.info("----------------------------------------------------------------------") logging.info( - "Creating domain file at " + self.plon.__str__() + " " + self.plat.__str__() + ".") + "Creating domain file at {}, {}.".format(self.plon.__str__(), self.plat.__str__())) # specify files fdomain_in = os.path.join(indir, file) @@ -169,14 +130,14 @@ def create_domain_at_point(self, indir, file): wfile = os.path.join(self.output_dir, fdomain_out) f3.to_netcdf(path=wfile, mode="w") - logging.info("Successfully created file (fdomain_out) at" + wfile) + logging.info("Successfully created file (fdomain_out) at %s", wfile) f2.close() f3.close() def create_landuse_at_point(self, indir, file, user_mods_dir): logging.info("----------------------------------------------------------------------") logging.info( - "Creating landuse file at " + self.plon.__str__() + " " + self.plat.__str__() + ".") + "Creating 
land use file at {}, {}.".format(self.plon.__str__(), self.plat.__str__())) # specify files fluse_in = os.path.join(indir, file) @@ -210,7 +171,7 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): wfile = os.path.join(self.output_dir, fluse_out) # mode 'w' overwrites file f3.to_netcdf(path=wfile, mode="w") - logging.info("Successfully created file (fluse_out) at " + wfile) + logging.info("Successfully created file (fluse_out) at %s", wfile) f2.close() f3.close() @@ -223,7 +184,8 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): def create_surfdata_at_point(self, indir, file, user_mods_dir): logging.info("----------------------------------------------------------------------") logging.info( - "Creating surface dataset file at " + self.plon.__str__() + " " + self.plat.__str__() + ".") + "Creating surface dataset file at {}, {}.".format(self.plon.__str__(), + self.plat.__str__())) # specify file fsurf_in = os.path.join(indir, file) @@ -284,8 +246,8 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): del f3.attrs["History_Log"] # mode 'w' overwrites file f3.to_netcdf(path=os.path.join(self.output_dir, fsurf_out), mode="w") - logging.info("Successfully created file (fsurf_out) at " + os.path.join(self.output_dir, - fsurf_out)) + logging.info("Successfully created file (fsurf_out) at %s", os.path.join(self.output_dir, + fsurf_out)) f2.close() f3.close() @@ -298,7 +260,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): def create_datmdomain_at_point(self, indir, file, dir_output_datm): logging.info("----------------------------------------------------------------------") logging.info( - "Creating DATM domain file at " + self.plon.__str__() + " " + self.plat.__str__() + ".") + "Creating DATM domain file at {}, {}.".format(self.plon.__str__(), self.plat.__str__())) # specify files fdatmdomain_in = os.path.join(indir, file) @@ -319,7 +281,7 @@ def create_datmdomain_at_point(self, indir, file, dir_output_datm): f3.attrs["Created_from"] = fdatmdomain_in # mode 'w' overwrites file f3.to_netcdf(path=wfile, mode="w") - logging.info("Successfully created file (fdatmdomain_out) at " + wfile) + logging.info("Successfully created file (fdatmdomain_out) at %s", wfile) f2.close() f3.close() @@ -338,7 +300,7 @@ def extract_datm_at(self, file_in, file_out): f3.attrs["Created_from"] = file_in # mode 'w' overwrites file f3.to_netcdf(path=file_out, mode="w") - logging.info("Successfully created file at " + file_out) + logging.info("Successfully created file at %s", file_out) f2.close() f3.close() @@ -370,7 +332,7 @@ def write_datm_streams_lines(self, streamname, datmfiles, file): def create_datm_at_point(self, file_dict, datm_syr, datm_eyr, datm_streams_file): logging.info("----------------------------------------------------------------------") logging.info( - "Creating DATM files at " + self.plon.__str__() + " " + self.plat.__str__() + ".") + "Creating DATM files at {}, {}.".format(self.plon.__str__(), self.plat.__str__())) # -- create data files infile = [] @@ -416,7 +378,7 @@ def create_datm_at_point(self, file_dict, datm_syr, datm_eyr, datm_streams_file) file_out = outfile[n] self.extract_datm_at(file_in, file_out) - logging.info("All DATM files are created in: " + file_dict.datm_outdir + ".") + logging.info("All DATM files are created in: %s", file_dict.datm_outdir) # write to user_nl_datm_streams if specified if self.create_user_mods: diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 5faa3f3560..e5660d17ed 100644 --- 
a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -44,7 +44,6 @@ import sys import os import logging -import subprocess import argparse import configparser @@ -52,7 +51,6 @@ from argparse import ArgumentParser import textwrap -from ctsm.site_and_regional.base_case import USRDAT_DIR from ctsm.site_and_regional.regional_case import RegionalCase from ctsm.site_and_regional.single_point_case import SinglePointCase from ctsm.path_utils import path_to_ctsm_root @@ -423,7 +421,7 @@ def setup_user_mods(out_dir, user_mods_dir, cesmroot): user_file.write(line) -def setup_files(args, defaults): +def setup_files(args, defaults, cesmroot): """ Sets up the files and folders needed for this program """ @@ -431,7 +429,7 @@ def setup_files(args, defaults): os.mkdir(args.out_dir) if args.create_user_mods: - setup_user_mods(args.out_dir, args.user_mods_dir) + setup_user_mods(args.out_dir, args.user_mods_dir, cesmroot) # DATM data dir_output_datm = "datmdata" @@ -617,7 +615,7 @@ def main(): sys.exit() # create files and folders necessary and return dictionary of file/folder locations - file_dict = setup_files(args, defaults) + file_dict = setup_files(args, defaults, cesmroot) if args.run_type == "point": subset_point(args, file_dict) From f655b470efa540d40935ceb2a5c9d36707c28f89 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 10:55:09 -0700 Subject: [PATCH 044/223] update docstring --- python/ctsm/subset_data.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index e5660d17ed..5f6c495f06 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -585,7 +585,8 @@ def subset_region(args, file_dict): def main(): """ - Calls functions that subset surface, landuse, domain, and/or DATM files. + Calls functions that subset surface, landuse, domain, and/or DATM files for a region or a + single point. 
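Most of the logging rewrites in this patch series swap string concatenation for lazy %-style arguments, which pylint's logging checks (logging-not-lazy, logging-format-interpolation) prefer: the message is interpolated by the logging framework only if the record is actually emitted. For instance:

    import logging

    plon, plat = 287.8, 42.5
    # eager: the string is built even when INFO records are filtered out
    logging.info("Creating DATM files at " + str(plon) + " " + str(plat))
    # lazy: interpolation is deferred to the handler
    logging.info("Creating DATM files at %s %s", plon, plat)
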
""" # add logging flags from ctsm_logging From c11e3aec8e52841636b47c60239518ae6c611fdf Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 11:36:36 -0700 Subject: [PATCH 045/223] making datm_dict its own dictionary within file_dict --- .../site_and_regional/single_point_case.py | 38 +++++++------- python/ctsm/subset_data.py | 50 ++++++++++--------- 2 files changed, 45 insertions(+), 43 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 2a08a704e3..d14e6adc80 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -329,7 +329,7 @@ def write_datm_streams_lines(self, streamname, datmfiles, file): self.write_to_file("{}:mapalgo=none".format(streamname), file) self.write_to_file("{}:meshfile=none".format(streamname), file) - def create_datm_at_point(self, file_dict, datm_syr, datm_eyr, datm_streams_file): + def create_datm_at_point(self, datm_dict: dict, datm_syr, datm_eyr, datm_streams_file): logging.info("----------------------------------------------------------------------") logging.info( "Creating DATM files at {}, {}.".format(self.plon.__str__(), self.plat.__str__())) @@ -349,27 +349,27 @@ def create_datm_at_point(self, file_dict, datm_syr, datm_eyr, datm_streams_file) dtag = ystr + "-" + mstr - fsolar = os.path.join(file_dict.datm_indir, file_dict.dir_solar, - "{}{}.nc".format(file_dict.tag_solar, dtag)) - fsolar2 = "{}{}.{}.nc".format(file_dict.tag_solar, self.tag, dtag) - fprecip = os.path.join(file_dict.datm_indir, file_dict.dir_prec, - "{}{}.nc".format(file_dict.tag_prec, dtag)) - fprecip2 = "{}{}.{}.nc".format(file_dict.tag_prec, self.tag, dtag) - ftpqw = os.path.join(file_dict.datm_indir, file_dict.dir_tpqw, - "{}{}.nc".format(file_dict.tag_tpqw, dtag)) - ftpqw2 = "{}{}.{}.nc".format(file_dict.tag_tpqw, self.tag, dtag) - - outdir = os.path.join(self.output_dir, file_dict.datm_outdir) + fsolar = os.path.join(datm_dict["datm_indir"], datm_dict["dir_solar"], + "{}{}.nc".format(datm_dict["tag_solar"], dtag)) + fsolar2 = "{}{}.{}.nc".format(datm_dict["tag_solar"], self.tag, dtag) + fprecip = os.path.join(datm_dict["datm_indir"], datm_dict["dir_prec"], + "{}{}.nc".format(datm_dict["tag_prec"], dtag)) + fprecip2 = "{}{}.{}.nc".format(datm_dict["tag_prec"], self.tag, dtag) + ftpqw = os.path.join(datm_dict["datm_indir"], datm_dict["dir_tpqw"], + "{}{}.nc".format(datm_dict["tag_tpqw"], dtag)) + ftpqw2 = "{}{}.{}.nc".format(datm_dict["tag_tpqw"], self.tag, dtag) + + outdir = os.path.join(self.output_dir, datm_dict["datm_outdir"]) infile += [fsolar, fprecip, ftpqw] outfile += [os.path.join(outdir, fsolar2), os.path.join(outdir, fprecip2), os.path.join(outdir, ftpqw2)] solarfiles.append( - os.path.join("${}".format(USRDAT_DIR), file_dict.datm_outdir, fsolar2)) + os.path.join("${}".format(USRDAT_DIR), datm_dict["datm_outdir"], fsolar2)) precfiles.append( - os.path.join("${}".format(USRDAT_DIR), file_dict.datm_outdir, fprecip2)) + os.path.join("${}".format(USRDAT_DIR), datm_dict["datm_outdir"], fprecip2)) tpqwfiles.append( - os.path.join("${}".format(USRDAT_DIR), file_dict.datm_outdir, ftpqw2)) + os.path.join("${}".format(USRDAT_DIR), datm_dict["datm_outdir"], ftpqw2)) nm = len(infile) for n in range(nm): @@ -378,11 +378,11 @@ def create_datm_at_point(self, file_dict, datm_syr, datm_eyr, datm_streams_file) file_out = outfile[n] self.extract_datm_at(file_in, file_out) - logging.info("All DATM files are created in: %s", 
file_dict.datm_outdir) + logging.info("All DATM files are created in: %s", datm_dict["datm_outdir"]) # write to user_nl_datm_streams if specified if self.create_user_mods: with open(datm_streams_file, "a") as file: - self.write_datm_streams_lines(file_dict.name_solar, solarfiles, file) - self.write_datm_streams_lines(file_dict.name_prec, precfiles, file) - self.write_datm_streams_lines(file_dict.name_tpqw, tpqwfiles, file) + self.write_datm_streams_lines(datm_dict["name_solar"], solarfiles, file) + self.write_datm_streams_lines(datm_dict["name_prec"], precfiles, file) + self.write_datm_streams_lines(datm_dict["name_tpqw"], tpqwfiles, file) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 5f6c495f06..91ddc0f130 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -432,8 +432,9 @@ def setup_files(args, defaults, cesmroot): setup_user_mods(args.out_dir, args.user_mods_dir, cesmroot) # DATM data + datm_type = 'datm_gswp3' dir_output_datm = "datmdata" - dir_input_datm = defaults.get("datm_gswp3", "dir") + dir_input_datm = defaults.get(datm_type, "dir") if args.create_datm: if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)): os.mkdir(os.path.join(args.out_dir, dir_output_datm)) @@ -451,7 +452,6 @@ def setup_files(args, defaults, cesmroot): fluse_in = defaults.get("landuse", "landuse_16pft") logging.debug("crop_flag = {} => num_pft = {}".format(args.crop_flag.__str__(), num_pft)) - datm_type = 'datm_gswp3' file_dict = {'main_dir': defaults.get("main", "clmforcingindir"), 'fdomain_in': defaults.get("domain", "file"), 'fsurf_dir': os.path.join(defaults.get("main", "clmforcingindir"), @@ -461,23 +461,24 @@ def setup_files(args, defaults, cesmroot): 'fsurf_in': fsurf_in, 'fluse_in': fluse_in, 'fdatmdomain_in': defaults.get(datm_type, "domain"), - 'datm_indir': dir_input_datm, - 'datm_outdir': dir_output_datm, - 'dir_solar': defaults.get(datm_type, 'solardir'), - 'dir_prec': defaults.get(datm_type, 'precdir'), - 'dir_tpqw': defaults.get(datm_type, 'tpqwdir'), - 'tag_solar': defaults.get(datm_type, 'solartag'), - 'tag_prec': defaults.get(datm_type, 'prec_tag'), - 'tag_tpqw': defaults.get(datm_type, 'tpqwtag'), - 'name_solar': defaults.get(datm_type, 'solarname'), - 'name_prec': defaults.get(datm_type, 'precname'), - 'name_tpqw': defaults.get(datm_type, 'tpqwname') + 'datm_dict' : { + 'datm_indir': dir_input_datm, + 'datm_outdir': dir_output_datm, + 'dir_solar': defaults.get(datm_type, 'solardir'), + 'dir_prec': defaults.get(datm_type, 'precdir'), + 'dir_tpqw': defaults.get(datm_type, 'tpqwdir'), + 'tag_solar': defaults.get(datm_type, 'solartag'), + 'tag_prec': defaults.get(datm_type, 'prec_tag'), + 'tag_tpqw': defaults.get(datm_type, 'tpqwtag'), + 'name_solar': defaults.get(datm_type, 'solarname'), + 'name_prec': defaults.get(datm_type, 'precname'), + 'name_tpqw': defaults.get(datm_type, 'tpqwname')} } return file_dict -def subset_point(args, file_dict): +def subset_point(args, file_dict : dict): """ Subsets surface, domain, land use, and/or DATM files at a single point """ @@ -508,28 +509,29 @@ def subset_point(args, file_dict): # -- Create CTSM domain file if single_point.create_domain: - single_point.create_domain_at_point(file_dict.main_dir, file_dict.fdomain_in) + single_point.create_domain_at_point(file_dict["main_dir"], file_dict["fdomain_in"]) # -- Create CTSM surface data file if single_point.create_surfdata: - single_point.create_surfdata_at_point(file_dict.fsurf_dir, file_dict.fsurf_in, + 
single_point.create_surfdata_at_point(file_dict["fsurf_dir"], file_dict["fsurf_in"], args.user_mods_dir) # -- Create CTSM transient landuse data file if single_point.create_landuse: - single_point.create_landuse_at_point(file_dict.fluse_dir, file_dict.fluse_in, + single_point.create_landuse_at_point(file_dict["fluse_dir"], file_dict["fluse_in"], args.user_mods_dir) # -- Create single point atmospheric forcing data if single_point.create_datm: # subset DATM domain file - single_point.create_datmdomain_at_point(file_dict.datm_indir, file_dict.fdatmdomain_in, - file_dict.datm_outdir) + single_point.create_datmdomain_at_point(file_dict["datm_indir"], + file_dict["fdatmdomain_in"], + file_dict["datm_outdir"]) # subset the DATM data nl_datm = os.path.join(args.user_mods_dir, "user_nl_datm_streams") - single_point.create_datm_at_point(file_dict, args.datm_syr, args.datm_eyr, + single_point.create_datm_at_point(file_dict['datm_dict'], args.datm_syr, args.datm_eyr, nl_datm) # -- Write shell commands @@ -539,7 +541,7 @@ def subset_point(args, file_dict): logging.info("Successfully ran script for single point.") -def subset_region(args, file_dict): +def subset_region(args, file_dict : dict): """ Subsets surface, domain, land use, and/or DATM files for a region """ @@ -567,16 +569,16 @@ def subset_region(args, file_dict): # -- Create CTSM domain file if region.create_domain: - region.create_domain_at_reg(file_dict.main_dir, file_dict.fdomain_in) + region.create_domain_at_reg(file_dict["main_dir"], file_dict["fdomain_in"]) # -- Create CTSM surface data file if region.create_surfdata: - region.create_surfdata_at_reg(file_dict.fsurf_dir, file_dict.fsurf_in, + region.create_surfdata_at_reg(file_dict["fsurf_dir"], file_dict["fsurf_in"], args.user_mods_dir) # -- Create CTSM transient landuse data file if region.create_landuse: - region.create_landuse_at_reg(file_dict.fluse_dir, file_dict.fluse_in, + region.create_landuse_at_reg(file_dict["fluse_dir"], file_dict["fluse_in"], args.user_mods_dir) logging.info("Successfully ran script for a regional case.") From 45fe47fd55fa2f3bb0c5562945d7db6a02c1345a Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 11:49:13 -0700 Subject: [PATCH 046/223] add docstrings --- python/ctsm/site_and_regional/base_case.py | 74 +++++++++++-------- .../site_and_regional/single_point_case.py | 10 +-- 2 files changed, 48 insertions(+), 36 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 2b7cb222b1..8c75d77950 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -2,15 +2,18 @@ Holds the class BaseCase, parent class to Regional and Single-Point cases """ import os -import subprocess import logging + +import subprocess + from datetime import date from getpass import getuser import numpy as np import xarray as xr -myname = getuser() +from ctsm.get_utils import get_get_short_hash + USRDAT_DIR = "CLM_USRDAT_DIR" logger = logging.getLogger(__name__) @@ -40,11 +43,15 @@ class BaseCase: add a tag and timetag to a filename ending with [._]cYYMMDD.nc or [._]YYMMDD.nc update_metadata(self, nc) - updates metadata for a netcdf file and removes attributes that should not be there + Class method for adding some new attributes (such as date, username) and + remove the old attributes from the netcdf file. """ def __init__(self, create_domain, create_surfdata, create_landuse, create_datm, create_user_mods): + """ + Initializes BaseCase with the given arguments. 
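The create_1d_coord helper below exists because CTSM domain, surface, and DATM files store longitude and latitude as 2-d fields (LONGXY/LATIXY, xc/yc), which xarray cannot index by label directly. Since the grids are regular, the first row and column supply the unique 1-d coordinate values. A usage sketch under that regular-grid assumption (file name hypothetical):

    import numpy as np
    import xarray as xr

    ds = xr.open_dataset("surfdata.nc")     # any file with 2-d LONGXY/LATIXY
    lon0 = np.asarray(ds["LONGXY"][0, :])   # row 0 holds the unique longitudes
    lat0 = np.asarray(ds["LATIXY"][:, 0])   # column 0 holds the unique latitudes
    lon = xr.DataArray(lon0, name="lon", dims="lsmlon", coords={"lsmlon": lon0})
    lat = xr.DataArray(lat0, name="lat", dims="lsmlat", coords={"lsmlat": lat0})
    ds = ds.assign({"lon": lon, "lat": lat})
    # nearest-neighbour selection now works through the named dimensions
    cell = ds.sel(lsmlon=287.8, lsmlat=42.5, method="nearest")
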
+ """ self.create_domain = create_domain self.create_surfdata = create_surfdata self.create_landuse = create_landuse @@ -52,31 +59,39 @@ def __init__(self, create_domain, create_surfdata, create_landuse, create_datm, self.create_user_mods = create_user_mods def __str__(self): + """ + Converts ingredients of the BaseCase to string for printing. + """ return "{}\n{}".format(str(self.__class__), "\n".join( ("{} = {}".format(str(key), str(self.__dict__[key])) for key in sorted(self.__dict__)))) @staticmethod def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): """ - lon_varname : variable name that has 2d lon - lat_varname : variable name that has 2d lat - x_dim: dimension name in X -- lon - y_dim: dimension name in Y -- lat + Creates 1d coordinate variables for a netcdf file to enable sel() method + Args + filename (str) : name of the netcdf file + lon_varname (str) : variable name that has 2d lon + lat_varname (str) : variable name that has 2d lat + x_dim (str) : dimension name in X -- lon + y_dim (str): dimension name in Y -- lat + Returns: + f_out (xarray Dataset): Xarray Dataset with 1-d coords """ logging.debug("Open file: %s", filename) - f1 = xr.open_dataset(filename) + f_in = xr.open_dataset(filename) # create 1d coordinate variables to enable sel() method - lon0 = np.asarray(f1[lon_varname][0, :]) - lat0 = np.asarray(f1[lat_varname][:, 0]) + lon0 = np.asarray(f_in[lon_varname][0, :]) + lat0 = np.asarray(f_in[lat_varname][:, 0]) lon = xr.DataArray(lon0, name="lon", dims=x_dim, coords={x_dim: lon0}) lat = xr.DataArray(lat0, name="lat", dims=y_dim, coords={y_dim: lat0}) - f2 = f1.assign({"lon": lon, "lat": lat}) + f_out = f_in.assign({"lon": lon, "lat": lat}) - f2.reset_coords([lon_varname, lat_varname]) - f1.close() - return f2 + f_out.reset_coords([lon_varname, lat_varname]) + f_in.close() + return f_out @staticmethod def add_tag_to_filename(filename, tag): @@ -84,7 +99,14 @@ def add_tag_to_filename(filename, tag): Add a tag and replace timetag of a filename Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc Add the tag to just before that ending part - and change the ending part to the current time tag + and change the ending part to the current time tag. + Args + filename (str) : file name + tag (str) : string of a tag to be added to the end of filename + Raises: + Error: When it cannot find . and _ in the filename. + Returns: + fname_out (str): filename with the tag and date string added """ basename = os.path.basename(filename) cend = -10 @@ -95,21 +117,23 @@ def add_tag_to_filename(filename, tag): os.abort() today = date.today() today_string = today.strftime("%y%m%d") - return basename[:cend] + "_" + tag + "_c" + today_string + ".nc" + fname_out = "{}_{}_c{}.nc".format(basename[:cend], tag, today_string) + return fname_out def update_metadata(self, nc): """ - Updates the metadata for a subset netcdf file. + Class method for adding some new attributes (such as date, username) and + remove the old attributes from the netcdf file. 
""" # update attributes today = date.today() today_string = today.strftime("%Y-%m-%d") # get git hash - sha = self.get_git_sha() + sha = get_get_short_hash() nc.attrs["Created_on"] = today_string - nc.attrs["Created_by"] = myname + nc.attrs["Created_by"] = getuser() nc.attrs["Created_with"] = os.path.abspath(__file__) + " -- " + sha # delete unrelated attributes if they exist @@ -130,18 +154,6 @@ def update_metadata(self, nc): logging.debug("This attr should be deleted : %s", attr) del nc.attrs[attr] - @staticmethod - def get_git_sha(): - """ - Returns Git short SHA for the current directory. - """ - try: - sha = (subprocess.check_output(["git", "-C", os.path.dirname(__file__), "rev-parse", - "--short", "HEAD"]).strip().decode()) - except subprocess.CalledProcessError: - sha = "NOT-A-GIT-REPOSITORY" - return sha - @staticmethod def write_to_file(text, file): """ diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index d14e6adc80..a84dfe0a4c 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -48,14 +48,14 @@ class SinglePointCase(BaseCase): or the "lon-lat" format if the site name does not exist. create_fileout_name: creates a file name from a basename and a specified tag - create_domain_at_point: - Create domain file at a single point. + create_domain_at_point + creates domain file at a single point. create_landuse_at_point: - Create landuse file at a single point. + creates landuse file at a single point. create_surfdata_at_point: - Create surface dataset at a single point. + creates surface dataset at a single point. create_datmdomain_at_point: - Create DATM domain file at a single point. + creates DATM domain file at a single point. """ def __init__( From 8e63202e0d94efd60962be75b021dc45cdc19758 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 13:41:20 -0700 Subject: [PATCH 047/223] fixing pylint suggestions --- python/ctsm/subset_data.py | 2 +- python/ctsm/utils.py | 15 +++++++-------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 250954fd1e..411c2c19ca 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -141,7 +141,7 @@ def get_parser(): ) pt_parser.add_argument( "--single-pft", - help="Flag for making the whole grid 100%% single PFT. [default: %(default)s]", + help="Flag for making the whole grid 100% single PFT. [default: %(default)s]", action="store", dest="overwrite_single_pft", type=str2bool, diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 5154420321..fb34186de9 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -37,13 +37,13 @@ def fill_template_file(path_to_template, path_to_final, substitutions): final_file.write(final_file_contents) -def str2bool(v): +def str2bool(arg): """ Function for converting different forms of command line boolean strings to boolean value. Args: - v (str): String bool input + arg (str): String bool input Raises: if the argument is not an acceptable boolean string @@ -53,11 +53,10 @@ def str2bool(v): Returns: bool: Boolean value corresponding to the input. 
""" - if isinstance(v, bool): - return v - if v.lower() in ("yes", "true", "t", "y", "1"): + if isinstance(arg, bool): + return arg + if arg.lower() in ("yes", "true", "t", "y", "1"): return True - elif v.lower() in ("no", "false", "f", "n", "0"): + if arg.lower() in ("no", "false", "f", "n", "0"): return False - else: - raise ValueError("Boolean value expected. [true or false] or [y or n]") + raise ValueError("Boolean value expected. [true or false] or [y or n]") From b35a57287d73d50fc29696ce6d40eb9d1ebc1f14 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 13:46:44 -0700 Subject: [PATCH 048/223] fix typo in defaults.get --- python/ctsm/subset_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 411c2c19ca..96dacbfaad 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -474,7 +474,7 @@ def setup_files(args, defaults, cesmroot): 'dir_prec': defaults.get(datm_type, 'precdir'), 'dir_tpqw': defaults.get(datm_type, 'tpqwdir'), 'tag_solar': defaults.get(datm_type, 'solartag'), - 'tag_prec': defaults.get(datm_type, 'prec_tag'), + 'tag_prec': defaults.get(datm_type, 'prectag'), 'tag_tpqw': defaults.get(datm_type, 'tpqwtag'), 'name_solar': defaults.get(datm_type, 'solarname'), 'name_prec': defaults.get(datm_type, 'precname'), From d8819a94df33b05ee920292b7bb714359b03dba5 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 13:56:21 -0700 Subject: [PATCH 049/223] update file names to separate dir and file --- .../ctsm/site_and_regional/regional_case.py | 27 ++++++++++--------- .../site_and_regional/single_point_case.py | 15 ++++++----- 2 files changed, 24 insertions(+), 18 deletions(-) diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index b7b175df9f..36736333be 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -106,9 +106,9 @@ def create_domain_at_reg(self, indir, file): # specify files fdomain_in = os.path.join(indir, file) - fdomain_out = os.path.join(self.output_dir, self.add_tag_to_filename(fdomain_in, self.tag)) + fdomain_out = self.add_tag_to_filename(fdomain_in, self.tag) logging.info("fdomain_in: %s", fdomain_in) - logging.info("fdomain_out: %s", fdomain_out) + logging.info("fdomain_out: %s", os.path.join(self.output_dir, fdomain_out)) logging.info("Creating domain file at region: %s", self.tag) # create 1d coordinate variables to enable sel() method @@ -125,8 +125,9 @@ def create_domain_at_reg(self, indir, file): f_out.attrs["Created_from"] = fdomain_in # mode 'w' overwrites file - f_out.to_netcdf(path=fdomain_out, mode="w") - logging.info("Successfully created file (fdomain_out) %s", fdomain_out) + wfile = os.path.join(self.output_dir, fdomain_out) + f_out.to_netcdf(path=wfile, mode="w") + logging.info("Successfully created file (fdomain_out) %s", wfile) f_in.close() f_out.close() @@ -139,9 +140,9 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir): # specify files fsurf_in = os.path.join(indir, file) - fsurf_out = os.path.join(self.output_dir, self.add_tag_to_filename(fsurf_in, self.tag)) + fsurf_out = self.add_tag_to_filename(fsurf_in, self.tag) logging.info("fsurf_in: %s", fsurf_in) - logging.info("fsurf_out: %s", fsurf_out) + logging.info("fsurf_out: %s", os.path.join(self.output_dir, fsurf_out)) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fsurf_in, "LONGXY", 
"LATIXY", "lsmlon", "lsmlat") @@ -157,8 +158,9 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir): f_out.attrs["Created_from"] = fsurf_in # mode 'w' overwrites file - f_out.to_netcdf(path=fsurf_out, mode="w") - logging.info("created file (fsurf_out) %s", fsurf_out) + wfile = os.path.join(self.output_dir, fsurf_out) + f_out.to_netcdf(path=wfile, mode="w") + logging.info("created file (fsurf_out) %s", wfile) f_in.close() f_out.close() @@ -177,9 +179,9 @@ def create_landuse_at_reg(self, indir, file, user_mods_dir): # specify files fluse_in = os.path.join(indir, file) - fluse_out = os.path.join(self.output_dir, self.add_tag_to_filename(fluse_in, self.tag)) + fluse_out = self.add_tag_to_filename(fluse_in, self.tag) logging.info("fluse_in: %s", fluse_in) - logging.info("fluse_out: %s", fluse_out) + logging.info("fluse_out: %s", os.path.join(self.output_dir, fluse_out)) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord( @@ -197,8 +199,9 @@ def create_landuse_at_reg(self, indir, file, user_mods_dir): f_out.attrs["Created_from"] = fluse_in # mode 'w' overwrites file - f_out.to_netcdf(path=fluse_out, mode="w") - logging.info("Successfully created file (fluse_out) %s", fluse_out) + wfile = os.path.join(self.output_dir, fluse_out) + f_out.to_netcdf(path=wfile, mode="w") + logging.info("Successfully created file (fluse_out) %s", wfile) f_in.close() f_out.close() diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index b4e8be5cf7..8068a44f4b 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -143,8 +143,9 @@ def create_domain_at_point(self, indir, file): self.update_metadata(f_out) f_out.attrs["Created_from"] = fdomain_in + wfile = os.path.join(self.output_dir, fdomain_out) f_out.to_netcdf(path=fdomain_out, mode="w") - logging.info("Successfully created file (fdomain_out) %s", fdomain_out) + logging.info("Successfully created file (fdomain_out) %s", wfile) f_in.close() f_out.close() @@ -191,7 +192,7 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): wfile = os.path.join(self.output_dir, fluse_out) # mode 'w' overwrites file f_out.to_netcdf(path=wfile, mode="w") - logging.info("Successfully created file (fluse_out), %s", fluse_out) + logging.info("Successfully created file (fluse_out), %s", wfile) f_in.close() f_out.close() @@ -271,8 +272,9 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): f_out.attrs["Created_from"] = fsurf_in del f_out.attrs["History_Log"] # mode 'w' overwrites file - f_out.to_netcdf(path=fsurf_out, mode="w") - logging.info("Successfully created file (fsurf_out) %s", fsurf_out) + wfile = os.path.join(self.output_dir, fsurf_out) + f_out.to_netcdf(path=wfile, mode="w") + logging.info("Successfully created file (fsurf_out) %s", wfile) f_in.close() f_out.close() @@ -311,8 +313,9 @@ def create_datmdomain_at_point(self, indir, file, dir_output_datm): f_out.attrs["Created_from"] = fdatmdomain_in # mode 'w' overwrites file - f_out.to_netcdf(path=fdatmdomain_out, mode="w") - logging.info("Successfully created file (fdatmdomain_out) : %s", fdatmdomain_out) + wfile = os.path.join(self.output_dir, fdatmdomain_out) + f_out.to_netcdf(path=wfile, mode="w") + logging.info("Successfully created file (fdatmdomain_out) : %s", wfile) f_in.close() f_out.close() From 799fc3d987ff22fd1ea2988f75541fd23ee45925 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 14:01:22 -0700 
Subject: [PATCH 050/223] fix typo with file writing --- python/ctsm/site_and_regional/single_point_case.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 8068a44f4b..a342de036d 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -350,12 +350,12 @@ def write_shell_commands(self, file): writes out xml commands to a file (i.e. shell_commands) for single-point runs """ # write_to_file surrounds text with newlines - with open(file, 'w'): - self.write_to_file("# Change below line if you move the subset data directory", file) - self.write_to_file("./xmlchange {}={}".format(USRDAT_DIR, self.output_dir), file) - self.write_to_file("./xmlchange PTS_LON={}".format(str(self.plon)), file) - self.write_to_file("./xmlchange PTS_LAT={}".format(str(self.plat)), file) - self.write_to_file("./xmlchange MPILIB=mpi-serial", file) + with open(file, 'w') as nl_file: + self.write_to_file("# Change below line if you move the subset data directory", nl_file) + self.write_to_file("./xmlchange {}={}".format(USRDAT_DIR, self.output_dir), nl_file) + self.write_to_file("./xmlchange PTS_LON={}".format(str(self.plon)), nl_file) + self.write_to_file("./xmlchange PTS_LAT={}".format(str(self.plat)), nl_file) + self.write_to_file("./xmlchange MPILIB=mpi-serial", nl_file) def write_datm_streams_lines(self, streamname, datmfiles, file): """ From 73d3c286c050d3bdacd157a7897235192a3e9eb2 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 14:10:42 -0700 Subject: [PATCH 051/223] fix directory of user_mods --- python/ctsm/subset_data.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 96dacbfaad..af9de74c24 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -404,12 +404,10 @@ def plon_type(plon): return plon -def setup_user_mods(out_dir, user_mods_dir, cesmroot): +def setup_user_mods(user_mods_dir, cesmroot): """ Sets up the user mods files and directories """ - if user_mods_dir == "": - user_mods_dir = os.path.join(out_dir, "user_mods") if not os.path.isdir(user_mods_dir): os.mkdir(user_mods_dir) @@ -431,11 +429,14 @@ def setup_files(args, defaults, cesmroot): """ Sets up the files and folders needed for this program """ + + if args.user_mods_dir == "": + args.user_mods_dir = os.path.join(args.out_dir, "user_mods") if not os.path.isdir(args.out_dir): os.mkdir(args.out_dir) if args.create_user_mods: - setup_user_mods(args.out_dir, args.user_mods_dir, cesmroot) + setup_user_mods(args.user_mods_dir, cesmroot) # DATM data datm_type = 'datm_gswp3' From c47ae2cfa8cd727ed5e5addab39afef9dfaa7f85 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 14:16:02 -0700 Subject: [PATCH 052/223] add datm_dict functionality to datmdomain --- .../site_and_regional/single_point_case.py | 7 ++-- python/ctsm/subset_data.py | 35 +++++++++---------- 2 files changed, 20 insertions(+), 22 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index a342de036d..e9c0fbee4a 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -284,7 +284,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): line = "fsurdat = 
'${}'".format(os.path.join(USRDAT_DIR, fsurf_out)) self.write_to_file(line, nl_clm) - def create_datmdomain_at_point(self, indir, file, dir_output_datm): + def create_datmdomain_at_point(self, datm_dict : dict): """ Create DATM domain file at a single point """ @@ -292,10 +292,11 @@ def create_datmdomain_at_point(self, indir, file, dir_output_datm): logging.info( "Creating DATM domain file at %s, %s", self.plon.__str__(), self.plat.__str__()) + # specify files - fdatmdomain_in = os.path.join(indir, file) + fdatmdomain_in = os.path.join(datm_dict["datm_indir"], datm_dict["fdatmdomain_in"],) datm_file = self.add_tag_to_filename(fdatmdomain_in, self.tag) - fdatmdomain_out = os.path.join(dir_output_datm, datm_file) + fdatmdomain_out = os.path.join(datm_dict["datm_outdir"], datm_file) logging.info("fdatmdomain_in: %s", fdatmdomain_in) logging.info("fdatmdomain out: %s", os.path.join(self.output_dir, fdatmdomain_out)) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index af9de74c24..3a544e737f 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -468,24 +468,24 @@ def setup_files(args, defaults, cesmroot): 'fsurf_in': fsurf_in, 'fluse_in': fluse_in, 'fdatmdomain_in': defaults.get(datm_type, "domain"), - 'datm_dict' : { - 'datm_indir': dir_input_datm, - 'datm_outdir': dir_output_datm, - 'dir_solar': defaults.get(datm_type, 'solardir'), - 'dir_prec': defaults.get(datm_type, 'precdir'), - 'dir_tpqw': defaults.get(datm_type, 'tpqwdir'), - 'tag_solar': defaults.get(datm_type, 'solartag'), - 'tag_prec': defaults.get(datm_type, 'prectag'), - 'tag_tpqw': defaults.get(datm_type, 'tpqwtag'), - 'name_solar': defaults.get(datm_type, 'solarname'), - 'name_prec': defaults.get(datm_type, 'precname'), - 'name_tpqw': defaults.get(datm_type, 'tpqwname')} + 'datm_dict': { + 'datm_indir': dir_input_datm, + 'datm_outdir': dir_output_datm, + 'dir_solar': defaults.get(datm_type, 'solardir'), + 'dir_prec': defaults.get(datm_type, 'precdir'), + 'dir_tpqw': defaults.get(datm_type, 'tpqwdir'), + 'tag_solar': defaults.get(datm_type, 'solartag'), + 'tag_prec': defaults.get(datm_type, 'prectag'), + 'tag_tpqw': defaults.get(datm_type, 'tpqwtag'), + 'name_solar': defaults.get(datm_type, 'solarname'), + 'name_prec': defaults.get(datm_type, 'precname'), + 'name_tpqw': defaults.get(datm_type, 'tpqwname')} } return file_dict -def subset_point(args, file_dict : dict): +def subset_point(args, file_dict: dict): """ Subsets surface, domain, land use, and/or DATM files at a single point """ @@ -530,11 +530,8 @@ def subset_point(args, file_dict : dict): # -- Create single point atmospheric forcing data if single_point.create_datm: - # subset DATM domain file - single_point.create_datmdomain_at_point(file_dict["datm_indir"], - file_dict["fdatmdomain_in"], - file_dict["datm_outdir"]) + single_point.create_datmdomain_at_point(file_dict["datm_dict"]) # subset the DATM data nl_datm = os.path.join(args.user_mods_dir, "user_nl_datm_streams") @@ -548,7 +545,7 @@ def subset_point(args, file_dict : dict): logging.info("Successfully ran script for single point.") -def subset_region(args, file_dict : dict): +def subset_region(args, file_dict: dict): """ Subsets surface, domain, land use, and/or DATM files for a region """ @@ -586,7 +583,7 @@ def subset_region(args, file_dict : dict): # -- Create CTSM transient landuse data file if region.create_landuse: region.create_landuse_at_reg(file_dict["fluse_dir"], file_dict["fluse_in"], - args.user_mods_dir) + args.user_mods_dir) logging.info("Successfully ran 
script for a regional case.") sys.exit() From af08452bcbd8048cae05366024ff268a6ef30eb9 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 14:16:59 -0700 Subject: [PATCH 053/223] fix typo --- python/ctsm/site_and_regional/single_point_case.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index e9c0fbee4a..8b5e1979b5 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -294,7 +294,7 @@ def create_datmdomain_at_point(self, datm_dict : dict): # specify files - fdatmdomain_in = os.path.join(datm_dict["datm_indir"], datm_dict["fdatmdomain_in"],) + fdatmdomain_in = os.path.join(datm_dict["datm_indir"], datm_dict["fdatmdomain_in"]) datm_file = self.add_tag_to_filename(fdatmdomain_in, self.tag) fdatmdomain_out = os.path.join(datm_dict["datm_outdir"], datm_file) logging.info("fdatmdomain_in: %s", fdatmdomain_in) From f6a3ecd8d2a8af9f59884745db46e998341a8ce5 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Tue, 14 Dec 2021 14:18:12 -0700 Subject: [PATCH 054/223] move datmdomain into datm_dict --- python/ctsm/site_and_regional/single_point_case.py | 7 +++---- python/ctsm/subset_data.py | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 8b5e1979b5..2981d69ff4 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -241,8 +241,8 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): f_out["PCT_LAKE"][:, :] = 0.0 f_out["PCT_WETLAND"][:, :] = 0.0 f_out["PCT_URBAN"][ - :, - :, + :, + :, ] = 0.0 f_out["PCT_GLACIER"][:, :] = 0.0 if self.uniform_snowpack: @@ -284,7 +284,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): line = "fsurdat = '${}'".format(os.path.join(USRDAT_DIR, fsurf_out)) self.write_to_file(line, nl_clm) - def create_datmdomain_at_point(self, datm_dict : dict): + def create_datmdomain_at_point(self, datm_dict: dict): """ Create DATM domain file at a single point """ @@ -292,7 +292,6 @@ def create_datmdomain_at_point(self, datm_dict : dict): logging.info( "Creating DATM domain file at %s, %s", self.plon.__str__(), self.plat.__str__()) - # specify files fdatmdomain_in = os.path.join(datm_dict["datm_indir"], datm_dict["fdatmdomain_in"]) datm_file = self.add_tag_to_filename(fdatmdomain_in, self.tag) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 3a544e737f..5b932688a5 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -467,10 +467,10 @@ def setup_files(args, defaults, cesmroot): os.path.join(defaults.get("landuse", "dir"))), 'fsurf_in': fsurf_in, 'fluse_in': fluse_in, - 'fdatmdomain_in': defaults.get(datm_type, "domain"), 'datm_dict': { 'datm_indir': dir_input_datm, 'datm_outdir': dir_output_datm, + 'fdatmdomain_in': defaults.get(datm_type, "domain"), 'dir_solar': defaults.get(datm_type, 'solardir'), 'dir_prec': defaults.get(datm_type, 'precdir'), 'dir_tpqw': defaults.get(datm_type, 'tpqwdir'), From 12b04a7f7ed75c4317775a80923b36919ea60544 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Wed, 15 Dec 2021 07:15:40 -0700 Subject: [PATCH 055/223] add namedtuple --- python/ctsm/site_and_regional/base_case.py | 7 +++ .../site_and_regional/single_point_case.py | 46 +++++++++---------- python/ctsm/subset_data.py 
| 40 ++++++++-------- 3 files changed, 50 insertions(+), 43 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index c302341d66..b4fca056fa 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -8,6 +8,7 @@ # -- standard libraries import os import logging +from collections import namedtuple from datetime import date from getpass import getuser @@ -22,6 +23,12 @@ USRDAT_DIR = "CLM_USRDAT_DIR" logger = logging.getLogger(__name__) +# named tuple for datm input/output files and folder names +DatmFiles = namedtuple( + "DatmFiles", + "indir outdir fdomain_in dir_solar dir_prec dir_tpqw tag_solar tag_prec tag_tpqw name_solar " + "name_prec name_tpqw " +) class BaseCase: """ diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 2981d69ff4..5302e5c0f1 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -12,7 +12,7 @@ import xarray as xr # -- import local classes for this script -from ctsm.site_and_regional.base_case import BaseCase, USRDAT_DIR +from ctsm.site_and_regional.base_case import BaseCase, USRDAT_DIR, DatmFiles logger = logging.getLogger(__name__) @@ -284,7 +284,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): line = "fsurdat = '${}'".format(os.path.join(USRDAT_DIR, fsurf_out)) self.write_to_file(line, nl_clm) - def create_datmdomain_at_point(self, datm_dict: dict): + def create_datmdomain_at_point(self, datm_tuple: DatmFiles): """ Create DATM domain file at a single point """ @@ -293,9 +293,9 @@ def create_datmdomain_at_point(self, datm_dict: dict): "Creating DATM domain file at %s, %s", self.plon.__str__(), self.plat.__str__()) # specify files - fdatmdomain_in = os.path.join(datm_dict["datm_indir"], datm_dict["fdatmdomain_in"]) + fdatmdomain_in = os.path.join(datm_tuple.indir, datm_tuple.fdomain_in) datm_file = self.add_tag_to_filename(fdatmdomain_in, self.tag) - fdatmdomain_out = os.path.join(datm_dict["datm_outdir"], datm_file) + fdatmdomain_out = os.path.join(datm_tuple.outdir, datm_file) logging.info("fdatmdomain_in: %s", fdatmdomain_in) logging.info("fdatmdomain out: %s", os.path.join(self.output_dir, fdatmdomain_out)) @@ -370,7 +370,7 @@ def write_datm_streams_lines(self, streamname, datmfiles, file): self.write_to_file("{}:mapalgo=none".format(streamname), file) self.write_to_file("{}:meshfile=none".format(streamname), file) - def create_datm_at_point(self, datm_dict: dict, datm_syr, datm_eyr, datm_streams_file): + def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_streams_file): """ Create all of a DATM dataset at a point. 
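As a minimal sketch of the DatmFiles container introduced above and how it is consumed downstream, assuming placeholder field values rather than the real defaults.cfg entries:

from collections import namedtuple

DatmFiles = namedtuple(
    "DatmFiles",
    "indir outdir fdomain_in dir_solar dir_prec dir_tpqw tag_solar tag_prec tag_tpqw "
    "name_solar name_prec name_tpqw"
)

# placeholder values; setup_files() fills the real ones from defaults.cfg
datm_tuple = DatmFiles("datm_in", "datm_out", "domain.nc",
                       "Solar", "Precip", "TPHWL",
                       "solar_tag.", "prec_tag.", "tpqw_tag.",
                       "SolarStream", "PrecipStream", "TPQWStream")

# attribute access replaces the earlier datm_dict["datm_indir"] style lookups
print(datm_tuple.indir, datm_tuple.fdomain_in)
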
""" @@ -392,27 +392,27 @@ def create_datm_at_point(self, datm_dict: dict, datm_syr, datm_eyr, datm_streams dtag = ystr + "-" + mstr - fsolar = os.path.join(datm_dict["datm_indir"], datm_dict["dir_solar"], - "{}{}.nc".format(datm_dict["tag_solar"], dtag)) - fsolar2 = "{}{}.{}.nc".format(datm_dict["tag_solar"], self.tag, dtag) - fprecip = os.path.join(datm_dict["datm_indir"], datm_dict["dir_prec"], - "{}{}.nc".format(datm_dict["tag_prec"], dtag)) - fprecip2 = "{}{}.{}.nc".format(datm_dict["tag_prec"], self.tag, dtag) - ftpqw = os.path.join(datm_dict["datm_indir"], datm_dict["dir_tpqw"], - "{}{}.nc".format(datm_dict["tag_tpqw"], dtag)) - ftpqw2 = "{}{}.{}.nc".format(datm_dict["tag_tpqw"], self.tag, dtag) - - outdir = os.path.join(self.output_dir, datm_dict["datm_outdir"]) + fsolar = os.path.join(datm_tuple.indir, datm_tuple.dir_solar, + "{}{}.nc".format(datm_tuple.tag_solar, dtag)) + fsolar2 = "{}{}.{}.nc".format(datm_tuple.tag_solar, self.tag, dtag) + fprecip = os.path.join(datm_tuple.indir, datm_tuple.dir_prec, + "{}{}.nc".format(datm_tuple.tag_prec, dtag)) + fprecip2 = "{}{}.{}.nc".format(datm_tuple.tag_prec, self.tag, dtag) + ftpqw = os.path.join(datm_tuple.indir, datm_tuple.dir_tpqw, + "{}{}.nc".format(datm_tuple.dir_tpqw, dtag)) + ftpqw2 = "{}{}.{}.nc".format(datm_tuple.tag_tpqw, self.tag, dtag) + + outdir = os.path.join(self.output_dir, datm_tuple.outdir) infile += [fsolar, fprecip, ftpqw] outfile += [os.path.join(outdir, fsolar2), os.path.join(outdir, fprecip2), os.path.join(outdir, ftpqw2)] solarfiles.append( - os.path.join("${}".format(USRDAT_DIR), datm_dict["datm_outdir"], fsolar2)) + os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fsolar2)) precfiles.append( - os.path.join("${}".format(USRDAT_DIR), datm_dict["datm_outdir"], fprecip2)) + os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fprecip2)) tpqwfiles.append( - os.path.join("${}".format(USRDAT_DIR), datm_dict["datm_outdir"], ftpqw2)) + os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, ftpqw2)) nm = len(infile) for n in range(nm): @@ -421,11 +421,11 @@ def create_datm_at_point(self, datm_dict: dict, datm_syr, datm_eyr, datm_streams file_out = outfile[n] self.extract_datm_at(file_in, file_out) - logging.info("All DATM files are created in: %s", datm_dict["datm_outdir"]) + logging.info("All DATM files are created in: %s", datm_tuple["datm_outdir"]) # write to user_nl_datm_streams if specified if self.create_user_mods: with open(datm_streams_file, "a") as file: - self.write_datm_streams_lines(datm_dict["name_solar"], solarfiles, file) - self.write_datm_streams_lines(datm_dict["name_prec"], precfiles, file) - self.write_datm_streams_lines(datm_dict["name_tpqw"], tpqwfiles, file) + self.write_datm_streams_lines(datm_tuple["name_solar"], solarfiles, file) + self.write_datm_streams_lines(datm_tuple["name_prec"], precfiles, file) + self.write_datm_streams_lines(datm_tuple["name_tpqw"], tpqwfiles, file) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 5b932688a5..37b06aba03 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -16,9 +16,9 @@ script subsets default surface, landuse, and DATM files, which can be seen in the defaults.cfg file. -To run a single-point or regional case using this data, you must update the -variable(s) `fsurdat` and/or `landuse` in the user_nl_clm namelist file to be -the full path to the subset files. 
This script will automatically create this +To run a single-point or regional case using this data with the NUOPC driver, +you must update the variable(s) `fsurdat` and/or `landuse` in the user_nl_clm namelist +file to be the full path to the subset files. This script will automatically create this file using the flag --create-user-mods. To use subset climate data, the namelist file user_nl_datm_streams must also be updated - this script will automatically create this file with @@ -60,6 +60,7 @@ from argparse import ArgumentParser # -- import local classes for this script +from ctsm.site_and_regional.base_case import DatmFiles from ctsm.site_and_regional.single_point_case import SinglePointCase from ctsm.site_and_regional.regional_case import RegionalCase from ctsm.path_utils import path_to_ctsm_root @@ -148,7 +149,7 @@ def get_parser(): nargs="?", const=True, required=False, - default=True, + default=False, ) pt_parser.add_argument( "--zero-nonveg", @@ -467,19 +468,18 @@ def setup_files(args, defaults, cesmroot): os.path.join(defaults.get("landuse", "dir"))), 'fsurf_in': fsurf_in, 'fluse_in': fluse_in, - 'datm_dict': { - 'datm_indir': dir_input_datm, - 'datm_outdir': dir_output_datm, - 'fdatmdomain_in': defaults.get(datm_type, "domain"), - 'dir_solar': defaults.get(datm_type, 'solardir'), - 'dir_prec': defaults.get(datm_type, 'precdir'), - 'dir_tpqw': defaults.get(datm_type, 'tpqwdir'), - 'tag_solar': defaults.get(datm_type, 'solartag'), - 'tag_prec': defaults.get(datm_type, 'prectag'), - 'tag_tpqw': defaults.get(datm_type, 'tpqwtag'), - 'name_solar': defaults.get(datm_type, 'solarname'), - 'name_prec': defaults.get(datm_type, 'precname'), - 'name_tpqw': defaults.get(datm_type, 'tpqwname')} + 'datm_tuple': DatmFiles(dir_input_datm, + dir_output_datm, + defaults.get(datm_type, "domain"), + defaults.get(datm_type, 'solardir'), + defaults.get(datm_type, 'precdir'), + defaults.get(datm_type, 'tpqwdir'), + defaults.get(datm_type, 'solartag'), + defaults.get(datm_type, 'prectag'), + defaults.get(datm_type, 'tpqwtag'), + defaults.get(datm_type, 'solarname'), + defaults.get(datm_type, 'precname'), + defaults.get(datm_type, 'tpqwname')) } return file_dict @@ -531,11 +531,11 @@ def subset_point(args, file_dict: dict): # -- Create single point atmospheric forcing data if single_point.create_datm: # subset DATM domain file - single_point.create_datmdomain_at_point(file_dict["datm_dict"]) + single_point.create_datmdomain_at_point(file_dict["datm_tuple"]) # subset the DATM data nl_datm = os.path.join(args.user_mods_dir, "user_nl_datm_streams") - single_point.create_datm_at_point(file_dict['datm_dict'], args.datm_syr, args.datm_eyr, + single_point.create_datm_at_point(file_dict['datm_tuple'], args.datm_syr, args.datm_eyr, nl_datm) # -- Write shell commands @@ -617,7 +617,7 @@ def main(): # --------------------------------- # # print help and exit when no option is chosen - if args.run_type != "point" and args.run_type != "reg": + if args.run_type != "point" and args.run_type != "region": get_parser().print_help() sys.exit() From 7bf3f12865e3f5ac3cb77e64d327a2b22e304f35 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Wed, 15 Dec 2021 07:20:49 -0700 Subject: [PATCH 056/223] fix typo in datm tag --- python/ctsm/site_and_regional/single_point_case.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 5302e5c0f1..3cea3dc39b 100644 --- 
a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -399,7 +399,7 @@ def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_s "{}{}.nc".format(datm_tuple.tag_prec, dtag)) fprecip2 = "{}{}.{}.nc".format(datm_tuple.tag_prec, self.tag, dtag) ftpqw = os.path.join(datm_tuple.indir, datm_tuple.dir_tpqw, - "{}{}.nc".format(datm_tuple.dir_tpqw, dtag)) + "{}{}.nc".format(datm_tuple.tag_tpqw, dtag)) ftpqw2 = "{}{}.{}.nc".format(datm_tuple.tag_tpqw, self.tag, dtag) outdir = os.path.join(self.output_dir, datm_tuple.outdir) @@ -421,11 +421,11 @@ def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_s file_out = outfile[n] self.extract_datm_at(file_in, file_out) - logging.info("All DATM files are created in: %s", datm_tuple["datm_outdir"]) + logging.info("All DATM files are created in: %s", datm_tuple.outdir) # write to user_nl_datm_streams if specified if self.create_user_mods: with open(datm_streams_file, "a") as file: - self.write_datm_streams_lines(datm_tuple["name_solar"], solarfiles, file) - self.write_datm_streams_lines(datm_tuple["name_prec"], precfiles, file) - self.write_datm_streams_lines(datm_tuple["name_tpqw"], tpqwfiles, file) + self.write_datm_streams_lines(datm_tuple.name_solar, solarfiles, file) + self.write_datm_streams_lines(datm_tuple.name_prec, precfiles, file) + self.write_datm_streams_lines(datm_tuple.name_tpqw, tpqwfiles, file) From 5dc0a44da417ecb5a824e53ad9af4441c90eadb1 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Wed, 15 Dec 2021 08:54:03 -0700 Subject: [PATCH 057/223] update import of str2bool --- python/ctsm/site_and_regional/base_case.py | 12 ++++++------ python/ctsm/site_and_regional/single_point_case.py | 9 +++------ python/ctsm/subset_data.py | 2 +- 3 files changed, 10 insertions(+), 13 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index b4fca056fa..43976c3a8c 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -161,7 +161,7 @@ def add_tag_to_filename(filename, tag): return fname_out @staticmethod - def update_metadata(nc): + def update_metadata(nc_file): """ Class method for adding some new attributes (such as date, username) and remove the old attributes from the netcdf file. 
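In effect, update_metadata stamps fresh provenance attributes onto the dataset and strips stale ones; a self-contained sketch of that behavior, with stand-in values for the real date, user, and git hash:

import xarray as xr

nc_file = xr.Dataset(attrs={"history": "old log entry", "Version": "unknown"})
nc_file.attrs["Created_on"] = "2021-12-15"  # stand-in for date.today()
nc_file.attrs["Created_by"] = "someuser"    # stand-in for getuser()
for attr in ["source_code", "SVN_url", "hostname", "history", "History_Log",
             "Logname", "Host", "Version", "Compiler_Optimized"]:
    if attr in nc_file.attrs:
        del nc_file.attrs[attr]             # drop stale provenance fields
print(nc_file.attrs)                        # only the Created_* keys remain
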
@@ -187,9 +187,9 @@ def update_metadata(nc): # get git hash sha = get_git_short_hash() - nc.attrs["Created_on"] = today_string - nc.attrs["Created_by"] = getuser() - nc.attrs["Created_with"] = os.path.abspath(__file__) + " -- " + sha + nc_file.attrs["Created_on"] = today_string + nc_file.attrs["Created_by"] = getuser() + nc_file.attrs["Created_with"] = os.path.abspath(__file__) + " -- " + sha # delete unrelated attributes if they exist del_attrs = [ @@ -202,12 +202,12 @@ def update_metadata(nc): "Version", "Compiler_Optimized", ] - attr_list = nc.attrs + attr_list = nc_file.attrs for attr in del_attrs: if attr in attr_list: logging.debug("This attr should be deleted : %s", attr) - del nc.attrs[attr] + del nc_file.attrs[attr] @staticmethod def write_to_file(text, file): diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 3cea3dc39b..2caaf19a6c 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -414,12 +414,9 @@ def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_s tpqwfiles.append( os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, ftpqw2)) - nm = len(infile) - for n in range(nm): - logging.debug(outfile[n]) - file_in = infile[n] - file_out = outfile[n] - self.extract_datm_at(file_in, file_out) + for out_f, in_f in enumerate(infile): + logging.debug(outfile[out_f]) + self.extract_datm_at(in_f, outfile[out_f]) logging.info("All DATM files are created in: %s", datm_tuple.outdir) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 37b06aba03..05ff223c4a 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -65,7 +65,7 @@ from ctsm.site_and_regional.regional_case import RegionalCase from ctsm.path_utils import path_to_ctsm_root -from ctsm.utils import str2bool +from ctsm.utils import _convert_to_bool as str2bool # -- import ctsm logging flags from ctsm.ctsm_logging import ( From 15b0f8e312ce31c81f1b4df5d5919f75f1437ed5 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Wed, 15 Dec 2021 09:03:15 -0700 Subject: [PATCH 058/223] update index name for more clarity --- python/ctsm/site_and_regional/single_point_case.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 2caaf19a6c..aea749022b 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -414,9 +414,9 @@ def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_s tpqwfiles.append( os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, ftpqw2)) - for out_f, in_f in enumerate(infile): - logging.debug(outfile[out_f]) - self.extract_datm_at(in_f, outfile[out_f]) + for idx, out_f in enumerate(outfile): + logging.debug(out_f) + self.extract_datm_at(infile[idx], out_f) logging.info("All DATM files are created in: %s", datm_tuple.outdir) From 2f3a6b52b73380e83af47c2e09e0b45e64fd2cb7 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Wed, 15 Dec 2021 12:26:36 -0700 Subject: [PATCH 059/223] update flag logic --- .../site_and_regional/single_point_case.py | 5 +- python/ctsm/subset_data.py | 82 +++++++------------ 2 files changed, 31 insertions(+), 56 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index aea749022b..33bf35c6e9 
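For reference, enumerate() yields (index, element) pairs, which is what the loop-variable rename above is clarifying; a quick illustration with stand-in file names:

infile = ["solar_1901-01.nc", "prec_1901-01.nc"]  # stand-in input names
outfile = ["solar_sub.nc", "prec_sub.nc"]         # stand-in output names

for idx, out_f in enumerate(outfile):
    # idx is the position and out_f the element, so pairing the two lists
    # reads naturally: infile[idx] is extracted into out_f
    print(infile[idx], "->", out_f)
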
100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -240,10 +240,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): f_out["PCT_CROP"][:, :] = 0 f_out["PCT_LAKE"][:, :] = 0.0 f_out["PCT_WETLAND"][:, :] = 0.0 - f_out["PCT_URBAN"][ - :, - :, - ] = 0.0 + f_out["PCT_URBAN"][:, :, ] = 0.0 f_out["PCT_GLACIER"][:, :] = 0.0 if self.uniform_snowpack: f_out["STD_ELEV"][:, :] = 20.0 diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 05ff223c4a..839a69bcdb 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -65,8 +65,6 @@ from ctsm.site_and_regional.regional_case import RegionalCase from ctsm.path_utils import path_to_ctsm_root -from ctsm.utils import _convert_to_bool as str2bool - # -- import ctsm logging flags from ctsm.ctsm_logging import ( setup_logging_pre_config, @@ -96,12 +94,12 @@ def get_parser(): parser.print_usage = parser.print_help subparsers = parser.add_subparsers( - help="Two possible ways to run this sript, either:", dest="run_type" + help="Two possible ways to run this script, either:", dest="run_type" ) pt_parser = subparsers.add_parser("point", help="Run script for a single point.") rg_parser = subparsers.add_parser("region", help="Run script for a region.") - # -- signle point parser options + # -- single point parser options pt_parser.add_argument( "--lat", help="Single point latitude. [default: %(default)s]", @@ -131,47 +129,47 @@ def get_parser(): ) pt_parser.add_argument( "--unisnow", - help="Flag for creating datasets using uniform snowpack. [default: %(default)s]", - action="store", + help="Create surface dataset to a uniform snowpack. [default: %(default)s]", + action="store_true", dest="uni_snow", - type=str2bool, nargs="?", const=True, required=False, - default=True, ) pt_parser.add_argument( "--single-pft", - help="Flag for making the whole grid 100% single PFT. [default: %(default)s]", - action="store", + help="Make the whole grid 100% single PFT. [default: %(default)s]", + action="store_true", dest="overwrite_single_pft", - type=str2bool, nargs="?", const=True, required=False, - default=False, ) pt_parser.add_argument( - "--zero-nonveg", - help="Flag for setting all non-vegetation landunits to zero. [default: %(default)s]", + "--dompft", + help="Dominant PFT type . [default: %(default)s]", action="store", + dest="dom_pft", + type=int, + default=7, + ) + pt_parser.add_argument( + "--no-zero-nonveg", + help="Don't set all non-vegetation landunits to zero. [default: %(default)s]", + action="store_false", dest="zero_nonveg", - type=str2bool, nargs="?", const=True, required=False, - default=True, ) pt_parser.add_argument( - "--saturation-excess", - help="Flag for making dataset using saturation excess. [default: %(default)s]", - action="store", + "--no-saturation-excess", + help="Don't allow for saturation excess conditions in surface file. [default: %(default)s]", + action="store_false", dest="saturation_excess", - type=str2bool, nargs="?", const=True, required=False, - default=True, ) # -- region-specific parser options rg_parser.add_argument( @@ -222,9 +220,8 @@ def get_parser(): rg_parser.add_argument( "--create-mesh", help="Flag for subsetting mesh file. 
[default: %(default)s]", - action="store", + action="store_true", dest="create_mesh", - type=str2bool, nargs="?", const=True, required=False, @@ -236,60 +233,50 @@ def get_parser(): subparser.add_argument( "--create-domain", help="Flag for creating CLM domain file at single point/region. [default: %(default)s]", - action="store", + action="store_true", dest="create_domain", - type=str2bool, nargs="?", const=True, required=False, - default=False, ) subparser.add_argument( - "--create-surface", + "--no-create-surface", help="Flag for creating surface data file at single point/region. [default: %(" "default)s]", - action="store", + action="store_false", dest="create_surfdata", - type=str2bool, nargs="?", const=True, required=False, - default=True, ) subparser.add_argument( "--create-landuse", help="Flag for creating landuse data file at single point/region. [default: %(" "default)s]", - action="store", + action="store_true", dest="create_landuse", - type=str2bool, nargs="?", const=True, required=False, - default=False, ) subparser.add_argument( "--create-datm", help="Flag for creating DATM forcing data at single point/region. [default: %(" "default)s]", - action="store", + action="store_true", dest="create_datm", - type=str2bool, nargs="?", const=True, required=False, - default=False, ) subparser.add_argument( "--create-user-mods", help="Flag for creating a user mods directory for running CTSM. [default: %(default)s]", - action="store", + action="store_true", dest="create_user_mods", - type=str2bool, nargs="?", const=True, required=False, - default=False, ) subparser.add_argument( "--datm-syr", @@ -312,24 +299,15 @@ def get_parser(): default=2014, ) subparser.add_argument( - "--crop", - help="Flag for creating datasets using the extensive list of prognostic crop types. [" + "--no-crop", + help="Use 16-PFT version of surface dataset rather than the crop (78-PFT) version. " + "Note that the 78-PFT version does not currently work in CTSM-FATES. [" "default: %(default)s]", - action="store", + action="store_false", dest="crop_flag", - type=str2bool, nargs="?", const=True, required=False, - default=True, - ) - subparser.add_argument( - "--dompft", - help="Dominant PFT type . [default: %(default)s] ", - action="store", - dest="dom_pft", - type=int, - default=7, ) if subparser == pt_parser: From 9492b04599627a647cc5d362ea628f350ad5d8a1 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Thu, 16 Dec 2021 12:51:42 -0700 Subject: [PATCH 060/223] fix pylint issues --- python/ctsm/site_and_regional/base_case.py | 3 +- .../site_and_regional/single_point_case.py | 3 + python/ctsm/subset_data.py | 65 ++++++++++--------- 3 files changed, 38 insertions(+), 33 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 43976c3a8c..e600947efc 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -196,7 +196,8 @@ def update_metadata(nc_file): "source_code", "SVN_url", "hostname", - "history" "History_Log", + "history", + "History_Log", "Logname", "Host", "Version", diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 33bf35c6e9..83f42861f6 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -77,6 +77,9 @@ class SinglePointCase(BaseCase): Extract all DATM data at a single point. 
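The missing-comma fix above matters because adjacent string literals concatenate silently; a short demonstration of the bug the hunk removes:

# before the fix, the missing comma fused two entries into one bogus name
del_attrs = ["hostname", "history" "History_Log", "Logname"]
print(del_attrs)       # ['hostname', 'historyHistory_Log', 'Logname']
print(len(del_attrs))  # 3, not 4, so neither "history" nor "History_Log" would match
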
""" + # pylint: disable=too-many-instance-attributes + # the ones we have are useful + def __init__( self, plat, diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 839a69bcdb..a3c2675497 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -64,6 +64,7 @@ from ctsm.site_and_regional.single_point_case import SinglePointCase from ctsm.site_and_regional.regional_case import RegionalCase from ctsm.path_utils import path_to_ctsm_root +from ctsm.utils import _convert_to_bool as str2bool # -- import ctsm logging flags from ctsm.ctsm_logging import ( @@ -129,47 +130,55 @@ def get_parser(): ) pt_parser.add_argument( "--unisnow", - help="Create surface dataset to a uniform snowpack. [default: %(default)s]", - action="store_true", + help="Flag for creating datasets using uniform snowpack. [default: %(default)s]", + action="store", dest="uni_snow", + type=str2bool, nargs="?", const=True, required=False, + default=True, ) pt_parser.add_argument( "--single-pft", - help="Make the whole grid 100% single PFT. [default: %(default)s]", - action="store_true", + help="Flag for making the whole grid 100%% single PFT. [default: %(default)s]", + action="store", dest="overwrite_single_pft", + type=str2bool, nargs="?", const=True, required=False, + default=True, ) pt_parser.add_argument( - "--dompft", - help="Dominant PFT type . [default: %(default)s]", + "--zero-nonveg", + help="Flag for setting all non-vegetation landunits to zero. [default: %(default)s]", action="store", - dest="dom_pft", - type=int, - default=7, - ) - pt_parser.add_argument( - "--no-zero-nonveg", - help="Don't set all non-vegetation landunits to zero. [default: %(default)s]", - action="store_false", dest="zero_nonveg", + type=str2bool, nargs="?", const=True, required=False, + default=True, ) pt_parser.add_argument( - "--no-saturation-excess", - help="Don't allow for saturation excess conditions in surface file. [default: %(default)s]", - action="store_false", + "--saturation-excess", + help="Flag for making dataset using saturation excess. [default: %(default)s]", + action="store", dest="saturation_excess", + type=str2bool, nargs="?", const=True, required=False, + default=True, + ) + pt_parser.add_argument( + "--dompft", + help="Dominant PFT type if we set the grid to 100% one PFT [default: %(default)s].", + action="store", + dest="dom_pft", + type=int, + default=7, ) # -- region-specific parser options rg_parser.add_argument( @@ -220,8 +229,9 @@ def get_parser(): rg_parser.add_argument( "--create-mesh", help="Flag for subsetting mesh file. [default: %(default)s]", - action="store_true", + action="store", dest="create_mesh", + type=str2bool, nargs="?", const=True, required=False, @@ -239,16 +249,6 @@ def get_parser(): const=True, required=False, ) - subparser.add_argument( - "--no-create-surface", - help="Flag for creating surface data file at single point/region. [default: %(" - "default)s]", - action="store_false", - dest="create_surfdata", - nargs="?", - const=True, - required=False, - ) subparser.add_argument( "--create-landuse", help="Flag for creating landuse data file at single point/region. [default: %(" @@ -299,15 +299,16 @@ def get_parser(): default=2014, ) subparser.add_argument( - "--no-crop", - help="Use 16-PFT version of surface dataset rather than the crop (78-PFT) version. " - "Note that the 78-PFT version does not currently work in CTSM-FATES. [" + "--crop", + help="Flag for creating datasets using the extensive list of prognostic crop types. 
[" "default: %(default)s]", action="store", dest="crop_flag", type=str2bool, nargs="?", const=True, required=False, default=True, ) if subparser == pt_parser: From 119dfe86e853f6b9d665db4f4d565fc6bfa0dc59 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 16 Dec 2021 13:21:23 -0700 Subject: [PATCH 061/223] some minor changes and typos. --- python/ctsm/ctsm_logging.py | 2 +- python/ctsm/git_utils.py | 2 +- python/ctsm/site_and_regional/base_case.py | 36 ------------------- .../site_and_regional/single_point_case.py | 8 ++--- python/ctsm/subset_data.py | 23 ++++++------ python/ctsm/utils.py | 30 +++++++++++----- 6 files changed, 39 insertions(+), 62 deletions(-) diff --git a/python/ctsm/ctsm_logging.py b/python/ctsm/ctsm_logging.py index 1b75154ff7..f129c200cf 100644 --- a/python/ctsm/ctsm_logging.py +++ b/python/ctsm/ctsm_logging.py @@ -80,7 +80,7 @@ def process_logging_args(args): def output_to_file(filepath, message, log_to_logger=False): """ - helper functionn to write to log file. + helper function to write to log file. """ with open(filepath, 'a') as fl: fl.write(message) diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py index 1055e75bfa..f73dcebb0e 100644 --- a/python/ctsm/git_utils.py +++ b/python/ctsm/git_utils.py @@ -10,7 +10,7 @@ def get_git_short_hash(): """ Returns Git short SHA for the current directory. """ - short_sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode() + sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode() return sha diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 2d1f8d0fa4..3963071b0e 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -125,42 +125,6 @@ def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): f_in.close() return f_out - @staticmethod - def add_tag_to_filename(filename, tag): - """ - Add a tag and replace timetag of a filename - Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc - Add the tag to just before that ending part - and change the ending part to the current time tag. - - Parameters - ---------- - filename (str) : file name - tag (str) : string of a tag to be added to the end of filename - - Raises - ------ - Error: When it cannot find . and _ in the filename. 
- - Returns - ------ - fname_out (str): filename with the tag and date string added - - """ - basename = os.path.basename(filename) - cend = -10 - if basename[cend] == "c": - cend = cend - 1 - if (basename[cend] != ".") and (basename[cend] != "_"): - logging.error( - "Trouble figuring out where to add tag to filename:" + filename - ) - os.abort() - today = date.today() - today_string = today.strftime("%y%m%d") - fname_out = basename[:cend] + "_" + tag + "_c" + today_string + ".nc" - return fname_out - def update_metadata(self, nc): """ Class method for adding some new attributes (such as date, username) and diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 52e8530b63..18a0fdfe6d 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -260,10 +260,10 @@ def create_surfdata_at_point(self): ) # update lsmlat and lsmlon to match site specific instead of the nearest point - f3['lsmlon']= np.atleast_1d(self.plon) - f3['lsmlat']= np.atleast_1d(self.plat) - f3['LATIXY'][:,:]= self.plat - f3['LONGXY'][:,:]= self.plon + f_out['lsmlon']= np.atleast_1d(self.plon) + f_out['lsmlat']= np.atleast_1d(self.plat) + f_out['LATIXY'][:,:]= self.plat + f_out['LONGXY'][:,:]= self.plon # update attributes self.update_metadata(f_out) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 55731d6cff..97a628a1b7 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -92,7 +92,7 @@ from ctsm.site_and_regional.single_point_case import SinglePointCase from ctsm.site_and_regional.regional_case import RegionalCase -from ctsm.utils import str2bool +from ctsm.utils import str2bool, add_tag_to_filename # -- import ctsm logging flags from ctsm.ctsm_logging import ( @@ -275,7 +275,7 @@ def get_parser(): nargs="?", const=True, required=False, - default=True, + default=False, ) subparser.add_argument( "--create-landuse", @@ -425,7 +425,7 @@ def plon_type(x): x (str): longitude Raises: - Error: when latitude is <-180 and >360. + Error: when longitude is <-180 and >360. Returns: x(float): converted longitude between 0 and 360 @@ -437,7 +437,7 @@ def plon_type(x): print("after modulo lon is :", x) if (x < 0) or (x > 360): raise argparse.ArgumentTypeError( - "ERROR: Latitude of single point should be between 0 and 360 or -180 and 180." + "ERROR: Longitude of single point should be between 0 and 360 or -180 and 180." 
) return x @@ -547,7 +547,7 @@ def main(): dir_inputdata, "share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc" ) fdomain_out = os.path.join( - dir_output, single_point.add_tag_to_filename(fdomain_in, single_point.tag) + dir_output, add_tag_to_filename(fdomain_in, single_point.tag) ) single_point.fdomain_in = fdomain_in @@ -569,7 +569,7 @@ def main(): ) fsurf_out = os.path.join( - dir_output, single_point.add_tag_to_filename(fsurf_in, single_point.tag) + dir_output, add_tag_to_filename(fsurf_in, single_point.tag) ) single_point.fsurf_in = fsurf_in @@ -589,9 +589,8 @@ def main(): dir_inputdata, "lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc", ) - # fluse_out = dir_output + single_point.add_tag_to_filename( fluse_in, single_point.tag ) # remove resolution from filename for singlept cases fluse_out = os.path.join( - dir_output, single_point.add_tag_to_filename(fluse_in, single_point.tag) + dir_output, add_tag_to_filename(fluse_in, single_point.tag) ) single_point.fluse_in = fluse_in single_point.fluse_out = fluse_out @@ -606,7 +605,7 @@ def main(): ) fdatmdomain_out = os.path.join( dir_output_datm, - single_point.add_tag_to_filename(fdatmdomain_in, single_point.tag), + add_tag_to_filename(fdatmdomain_in, single_point.tag), ) single_point.fdatmdomain_in = fdatmdomain_in single_point.fdatmdomain_out = fdatmdomain_out @@ -709,7 +708,7 @@ def main(): dir_inputdata, "share/domains/domain.lnd.fv1.9x2.5_gx1v7.170518.nc" ) fdomain_out = os.path.join( - dir_output, region.add_tag_to_filename(fdomain_in, region.tag) + dir_output, add_tag_to_filename(fdomain_in, region.tag) ) region.fdomain_in = fdomain_in @@ -724,7 +723,7 @@ def main(): "lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc", ) fsurf_out = os.path.join( - dir_output, region.add_tag_to_filename(fsurf_in, region.tag) + dir_output, add_tag_to_filename(fsurf_in, region.tag) ) region.fsurf_in = fsurf_in @@ -739,7 +738,7 @@ def main(): "lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc", ) fluse_out = os.path.join( - dir_output, region.add_tag_to_filename(fluse_in, region.tag) + dir_output, add_tag_to_filename(fluse_in, region.tag) ) region.fluse_in = fluse_in region.fluse_out = fluse_out diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 1a3a4bc508..5213106499 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -92,22 +92,36 @@ def add_tag_to_filename(filename, tag): Add a tag and replace timetag of a filename Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc Add the tag to just before that ending part - and change the ending part to the current time tag - """ + and change the ending part to the current time tag. + + Parameters + ---------- + filename (str) : file name + tag (str) : string of a tag to be added to the end of filename + + Raises + ------ + Error: When it cannot find . and _ in the filename. 
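A worked example of the renaming this helper performs, assuming a hypothetical lon-lat tag and a run date of 2021-12-16 (the real date suffix is whatever today's yymmdd stamp is):

from ctsm.utils import add_tag_to_filename

fname_in = "surfdata_0.9x1.25_78pfts_CMIP6_simyr1850_c170824.nc"
fname_out = add_tag_to_filename(fname_in, "300.0_42.0")
# the tag lands just before the trailing [._]cYYMMDD.nc piece and the
# date code is refreshed:
# -> "surfdata_0.9x1.25_78pfts_CMIP6_simyr1850_300.0_42.0_c211216.nc"
print(fname_out)
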
+ + Returns + ------ + fname_out (str): filename with the tag and date string added + """ basename = os.path.basename(filename) cend = -10 - if basename[cend] == "c": cend = cend - 1 - if ( (basename[cend] != ".") and (basename[cend] != "_") ): - errmsg = 'Trouble figuring out where to add tag to filename: ' + filename - abort(errmsg) - + if (basename[cend] != ".") and (basename[cend] != "_"): + logging.error( + "Trouble figuring out where to add tag to filename:" + filename + ) + os.abort() today = date.today() today_string = today.strftime("%y%m%d") + fname_out = basename[:cend] + "_" + tag + "_c" + today_string + ".nc" + return fname_out - return basename[:cend] + "_" + tag + "_c" + today_string + '.nc' def update_metadata(file, title, summary, contact, data_script, description): """ From e3d3d0361ca27740a5891846eb674abfd2822cce Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Thu, 16 Dec 2021 13:22:11 -0700 Subject: [PATCH 062/223] pylint issues --- .../site_and_regional/single_point_case.py | 17 +++++----- python/ctsm/subset_data.py | 31 ------------------- 2 files changed, 8 insertions(+), 40 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 4bc4132ed8..e1b4e3318f 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -237,8 +237,8 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): # modify surface data properties if self.overwrite_single_pft: f_out["PCT_NAT_PFT"][:, :, :] = 0 - if (self.dominant_pft<16) : - f_out['PCT_NAT_PFT'][:,:,self.dominant_pft] = 100 + if self.dominant_pft < 16: + f_out['PCT_NAT_PFT'][:, :, self.dominant_pft] = 100 if self.zero_nonveg_landunits: f_out["PCT_NATVEG"][:, :] = 100 f_out["PCT_CROP"][:, :] = 0 @@ -252,7 +252,6 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): f_out["FMAX"][:, :] = 0.0 # specify dimension order - # f_out = f_out.transpose(u'time', u'cft', u'natpft', u'lsmlat', u'lsmlon') f_out = f_out.transpose( u"time", u"cft", @@ -269,18 +268,18 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): ) # update lsmlat and lsmlon to match site specific instead of the nearest point - f3['lsmlon']= np.atleast_1d(self.plon) - f3['lsmlat']= np.atleast_1d(self.plat) - f3['LATIXY'][:,:]= self.plat - f3['LONGXY'][:,:]= self.plon + f_out['lsmlon'] = np.atleast_1d(self.plon) + f_out['lsmlat'] = np.atleast_1d(self.plat) + f_out['LATIXY'][:, :] = self.plat + f_out['LONGXY'][:, :] = self.plon # update attributes self.update_metadata(f_out) f_out.attrs["Created_from"] = fsurf_in del f_out.attrs["History_Log"] # mode 'w' overwrites file - wfile = os.path.join(self.output_dir, fsurf_out, format = "NETCDF3_64BIT") - f_out.to_netcdf(path=wfile, mode="w") + wfile = os.path.join(self.output_dir, fsurf_out) + f_out.to_netcdf(path=wfile, mode="w", format="NETCDF3_64BIT") logging.info("Successfully created file (fsurf_out) %s", wfile) f_in.close() f_out.close() diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index d65520c431..f13cb367a1 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -332,37 +332,6 @@ def get_parser(): type=str, default="", ) - - pt_parser.add_argument('--datm_from_tower', - help='Flag for creating DATM forcing data at single point for a tower data. 
[default: %(default)s]', - action="store", - dest="datm_tower", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - pt_parser.add_argument('--create_user_mods', - help='Flag for creating user mods directory . [default: %(default)s]', - action="store", - dest="datm_tower", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - pt_parser.add_argument('--user_mods_dir', - help='Flag for creating user mods directory . [default: %(default)s]', - action="store", - dest="user_mod_dir", - type = str, - nargs = '?', - const = True, - required = False, - default = False) - - - # -- print help for both subparsers parser.epilog = textwrap.dedent( f"""\ From 139794129f5134e400bf4f430e31df42ebe03650 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 16 Dec 2021 14:06:51 -0700 Subject: [PATCH 063/223] updating the command line arguments. --- python/ctsm/subset_data.py | 91 ++++++++++++-------------------------- 1 file changed, 29 insertions(+), 62 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 97a628a1b7..d11fc61df5 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -152,48 +152,36 @@ def get_parser(): default="", ) pt_parser.add_argument( - "--unisnow", - help="Flag for creating datasets using uniform snowpack. [default: %(default)s]", - action="store", + "--variable-snow-fraction", + help="Creating datasets using variable snow fraction.If unset, snow is set to uniform fraction.", + action="store_false", dest="uni_snow", - type=str2bool, - nargs="?", - const=True, required=False, default=True, ) pt_parser.add_argument( - "--single-pft", - help="Flag for making the whole grid 100%% single PFT. [default: %(default)s]", - action="store", + "--allow-multiple-pft", + help="Creating dataset using multiple pft. If unset, it assumes the whole grid is 100% single PFT set by --dom-pft.", + action="store_false", dest="overwrite_single_pft", - type=str2bool, - nargs="?", - const=True, required=False, default=True, ) pt_parser.add_argument( "--zero-nonveg", - help="Flag for setting all non-vegetation landunits to zero. [default: %(default)s]", - action="store", + help="Creating dataset by setting all non-vegetation landunits to zero.", + action="store_true", dest="zero_nonveg", - type=str2bool, - nargs="?", - const=True, required=False, - default=True, + default=False, ) pt_parser.add_argument( - "--saturation-excess", - help="Flag for making dataset using saturation excess. [default: %(default)s]", - action="store", + "--allow-saturation-excess", + help="Creating dataset allowing saturatated conditions. If unset saturation_excess is set to zero.", + action="store_true", dest="saturation_excess", - type=str2bool, - nargs="?", - const=True, required=False, - default=True, + default=False, ) # -- region-specific parser options rg_parser.add_argument( @@ -243,12 +231,9 @@ def get_parser(): ) rg_parser.add_argument( "--create-mesh", - help="Flag for subsetting mesh file. [default: %(default)s]", - action="store", + help="Flag for subsetting mesh file.", + action="store_true", dest="create_mesh", - type=str2bool, - nargs="?", - const=True, required=False, default=False, ) @@ -257,45 +242,33 @@ def get_parser(): for subparser in [pt_parser, rg_parser]: subparser.add_argument( "--create-domain", - help="Flag for creating CLM domain file at single point/region. 
[default: %(default)s]", - action="store", + help="Create CLM domain file at single point/region.", + action="store_true", dest="create_domain", - type=str2bool, - nargs="?", - const=True, required=False, default=False, ) subparser.add_argument( "--create-surface", - help="Flag for creating surface data file at single point/region. [default: %(default)s]", - action="store", + help="Create surface data file at single point/region.", + action="store_true", dest="create_surfdata", - type=str2bool, - nargs="?", - const=True, required=False, default=False, ) subparser.add_argument( "--create-landuse", - help="Flag for creating landuse data file at single point/region. [default: %(default)s]", - action="store", + help="Create landuse data file at single point/region.", + action="store_true", dest="create_landuse", - type=str2bool, - nargs="?", - const=True, required=False, default=False, ) subparser.add_argument( "--create-datm", - help="Flag for creating DATM forcing data at single point/region. [default: %(default)s]", - action="store", + help="Create DATM forcing data at single point/region.", + action="store_true", dest="create_datm", - type=str2bool, - nargs="?", - const=True, required=False, default=False, ) @@ -319,14 +292,11 @@ def get_parser(): ) subparser.add_argument( "--crop", - help="Flag for creating datasets using the extensive list of prognostic crop types. [default: %(default)s]", - action="store", + help="Create datasets using the extensive list of prognostic crop types.", + action="store_true", dest="crop_flag", - type=str2bool, - nargs="?", - const=True, required=False, - default=True, + default=False, ) subparser.add_argument( "--dompft", @@ -351,13 +321,10 @@ def get_parser(): default=os.path.join(os.getcwd(), "subset_data_" + parser_name), ) - pt_parser.add_argument('--datm_from_tower', - help='Flag for creating DATM forcing data at single point for a tower data. 
[default: %(default)s]', - action="store", + pt_parser.add_argument('--datm-from-tower', + help='Create DATM forcing data at single point for a tower data.', + action="store_true", dest="datm_tower", - type = str2bool, - nargs = '?', - const = True, required = False, default = False) pt_parser.add_argument('--create_user_mods', From 26bf17f9637943ddbd98a20a0044fd8570015e40 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 16 Dec 2021 14:33:56 -0700 Subject: [PATCH 064/223] minor changes --- python/ctsm/site_and_regional/base_case.py | 4 +- .../ctsm/site_and_regional/regional_case.py | 12 +- .../site_and_regional/single_point_case.py | 36 ++--- python/ctsm/subset_data.py | 130 ++++++++++-------- 4 files changed, 96 insertions(+), 86 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 3963071b0e..47ff665b05 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -110,7 +110,7 @@ def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): f_out (xarray Dataset): Xarray Dataset with 1-d coords """ - logging.debug("Open file: " + filename) + logger.debug("Open file: " + filename) f_in = xr.open_dataset(filename) # create 1d coordinate variables to enable sel() method @@ -170,7 +170,7 @@ def update_metadata(self, nc): for attr in del_attrs: if attr in attr_list: - logging.debug("This attr should be deleted : " + attr) + logger.debug("This attr should be deleted : " + attr) del nc.attrs[attr] # for attr, value in attr_list.items(): diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 85c96c61b2..5ad6874f81 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -96,7 +96,7 @@ def create_tag(self): def create_domain_at_reg(self): # logging.debug ("Creating domain file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) - logging.info("Creating domain file at region:" + self.tag) + logger.info("Creating domain file at region:" + self.tag) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") lat = f_in["lat"] @@ -113,13 +113,13 @@ def create_domain_at_reg(self): wfile = self.fdomain_out # mode 'w' overwrites file f_out.to_netcdf(path=wfile, mode="w") - logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) + logger.info("Successfully created file (fdomain_out)" + self.fdomain_out) f_in.close() f_out.close() def create_surfdata_at_reg(self): # logging.debug ("Creating surface dataset file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) - logging.info("Creating surface dataset file at region:" + self.tag) + logger.info("Creating surface dataset file at region:" + self.tag) # create 1d coordinate variables to enable sel() method filename = self.fsurf_in f_in = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") @@ -136,14 +136,14 @@ def create_surfdata_at_reg(self): # mode 'w' overwrites file f_out.to_netcdf(path=self.fsurf_out, mode="w") - logging.info("created file (fsurf_out)" + self.fsurf_out) + logger.info("created file (fsurf_out)" + self.fsurf_out) # f1.close(); f_in.close() f_out.close() def create_landuse_at_reg(self): # logging.debug ("Creating landuse file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" 
"+self.lat1.__str__()+"-"+self.lat2.__str__()) - logging.info("Creating landuse file at region:" + self.tag) + logger.info("Creating landuse file at region:" + self.tag) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord( self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat" @@ -162,6 +162,6 @@ def create_landuse_at_reg(self): wfile = self.fluse_out # mode 'w' overwrites file f_out.to_netcdf(path=wfile, mode="w") - logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) + logger.info("Successfully created file (fdomain_out)" + self.fdomain_out) f_in.close() f_out.close() diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 18a0fdfe6d..b2eebe1b3d 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -118,10 +118,10 @@ def create_domain_at_point(self): """ Create domain file for this SinglePointCase class. """ - logging.info( + logger.info( "----------------------------------------------------------------------" ) - logging.info( + logger.info( "Creating domain file at " + self.plon.__str__() + " " @@ -144,7 +144,7 @@ def create_domain_at_point(self): wfile = self.fdomain_out f_out.to_netcdf(path=wfile, mode="w", format='NETCDF3_64BIT') - logging.info("Successfully created file (fdomain_out)" + self.fdomain_out) + logger.info("Successfully created file (fdomain_out)" + self.fdomain_out) f_in.close() f_out.close() @@ -152,10 +152,10 @@ def create_landuse_at_point(self): """ Create landuse file at a single point. """ - logging.info( + logger.info( "----------------------------------------------------------------------" ) - logging.info( + logger.info( "Creating landuse file at " + self.plon.__str__() + " " @@ -193,7 +193,7 @@ def create_landuse_at_point(self): wfile = self.fluse_out # mode 'w' overwrites file f_out.to_netcdf(path=wfile, mode="w", format='NETCDF3_64BIT') - logging.info("Successfully created file (luse_out)" + self.fluse_out + ".") + logger.info("Successfully created file (luse_out)" + self.fluse_out + ".") f_in.close() f_out.close() @@ -201,10 +201,10 @@ def create_surfdata_at_point(self): """ Create surface data file at a single point. 
""" - logging.info( + logger.info( "----------------------------------------------------------------------" ) - logging.info( + logger.info( "Creating surface dataset file at " + self.plon.__str__() + " " @@ -271,7 +271,7 @@ def create_surfdata_at_point(self): del f_out.attrs["History_Log"] # mode 'w' overwrites file f_out.to_netcdf(path=self.fsurf_out, mode="w", format = 'NETCDF3_64BIT') - logging.info("Successfully created file (fsurf_out) :" + self.fsurf_out) + logger.info("Successfully created file (fsurf_out) :" + self.fsurf_out) f_in.close() f_out.close() @@ -279,10 +279,10 @@ def create_datmdomain_at_point(self): """ Create DATM domain file at a single point """ - logging.info( + logger.info( "----------------------------------------------------------------------" ) - logging.info( + logger.info( "Creating DATM domain file at " + self.plon.__str__() + " " @@ -306,8 +306,8 @@ def create_datmdomain_at_point(self): f_out.attrs["Created_from"] = self.fdatmdomain_in # mode 'w' overwrites file - f_out.to_netcdf(path=wfile, mode="w") - logging.info( + f_out.to_netcdf(path=wfile, mode="w", format = 'NETCDF3_64BIT') + logger.info( "Successfully created file (fdatmdomain_out) :" + self.fdatmdomain_out ) f_in.close() @@ -332,7 +332,7 @@ def extract_datm_at(self, file_in, file_out): # mode 'w' overwrites file f_out.to_netcdf(path=file_out, mode="w") - logging.info("Successfully created file :" + file_out) + logger.info("Successfully created file :" + file_out) f_in.close() f_out.close() @@ -340,10 +340,10 @@ def create_datm_at_point(self): """ Create all DATM dataset at a point. """ - logging.info( + logger.info( "----------------------------------------------------------------------" ) - logging.info( + logger.info( "Creating DATM files at " + self.plon.__str__() + " " @@ -384,9 +384,9 @@ def create_datm_at_point(self): nm = len(infile) for n in range(nm): - logging.debug(outfile[n]) + logger.debug(outfile[n]) file_in = infile[n] file_out = outfile[n] self.extract_datm_at(file_in, file_out) - logging.info("All DATM files are created in: " + self.dir_output_datm + ".") + logger.info("All DATM files are created in: " + self.dir_output_datm + ".") diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index d11fc61df5..80ad979cdd 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -321,32 +321,36 @@ def get_parser(): default=os.path.join(os.getcwd(), "subset_data_" + parser_name), ) - pt_parser.add_argument('--datm-from-tower', - help='Create DATM forcing data at single point for a tower data.', - action="store_true", - dest="datm_tower", - required = False, - default = False) - pt_parser.add_argument('--create_user_mods', - help='Flag for creating user mods directory . [default: %(default)s]', - action="store", - dest="datm_tower", - type = str2bool, - nargs = '?', - const = True, - required = False, - default = False) - pt_parser.add_argument('--user_mods_dir', - help='Flag for creating user mods directory . [default: %(default)s]', - action="store", - dest="user_mod_dir", - type = str, - nargs = '?', - const = True, - required = False, - default = False) - - + pt_parser.add_argument( + "--datm-from-tower", + help="Create DATM forcing data at single point for a tower data.", + action="store_true", + dest="datm_tower", + required=False, + default=False, + ) + pt_parser.add_argument( + "--create_user_mods", + help="Flag for creating user mods directory . 
[default: %(default)s]", + action="store", + dest="datm_tower", + type=str2bool, + nargs="?", + const=True, + required=False, + default=False, + ) + pt_parser.add_argument( + "--user_mods_dir", + help="Flag for creating user mods directory . [default: %(default)s]", + action="store", + dest="user_mod_dir", + type=str, + nargs="?", + const=True, + required=False, + default=False, + ) # -- print help for both subparsers parser.epilog = textwrap.dedent( @@ -422,14 +426,14 @@ def main(): myname = getuser() pwd = os.getcwd() - logging.info("User = " + myname) - logging.info("Current directory = " + pwd) + logger.info("User = " + myname) + logger.info("Current directory = " + pwd) if args.run_type == "point": - logging.info( + logger.info( "----------------------------------------------------------------------------" ) - logging.info( + logger.info( "This script extracts a single point from the global CTSM inputdata datasets." ) @@ -480,14 +484,14 @@ def main(): single_point.create_tag() - logging.debug(single_point) + logger.debug(single_point) if crop_flag: num_pft = "78" else: num_pft = "16" - logging.debug("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) + logger.debug("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) # -- Set input and output filenames # -- Specify input and output directories @@ -506,8 +510,8 @@ def main(): if not os.path.isdir(dir_output_datm): os.mkdir(dir_output_datm) - logging.info("dir_input_datm : " + dir_input_datm) - logging.info("dir_output_datm : " + dir_output_datm) + logger.info("dir_input_datm : " + dir_input_datm) + logger.info("dir_output_datm : " + dir_output_datm) # -- Specify land domain file --------------------------------- fdomain_in = os.path.join( @@ -520,8 +524,8 @@ def main(): single_point.fdomain_in = fdomain_in single_point.fdomain_out = fdomain_out - logging.info("fdomain_in : " + fdomain_in) - logging.info("fdomain_out : " + fdomain_out) + logger.info("fdomain_in : " + fdomain_in) + logger.info("fdomain_out : " + fdomain_out) # -- Specify surface data file -------------------------------- if crop_flag: @@ -542,8 +546,8 @@ def main(): single_point.fsurf_in = fsurf_in single_point.fsurf_out = fsurf_out - logging.info("fsurf_in : " + fsurf_in) - logging.info("fsurf_out : " + fsurf_out) + logger.info("fsurf_in : " + fsurf_in) + logger.info("fsurf_out : " + fsurf_out) # -- Specify landuse file ------------------------------------- if crop_flag: @@ -562,8 +566,8 @@ def main(): single_point.fluse_in = fluse_in single_point.fluse_out = fluse_out - logging.info("fluse_in : " + fluse_in) - logging.info("fluse_out : " + fluse_out) + logger.info("fluse_in : " + fluse_in) + logger.info("fluse_out : " + fluse_out) # -- Specify datm domain file --------------------------------- fdatmdomain_in = os.path.join( @@ -577,8 +581,8 @@ def main(): single_point.fdatmdomain_in = fdatmdomain_in single_point.fdatmdomain_out = fdatmdomain_out - logging.info("fdatmdomain_in : " + fdatmdomain_in) - logging.info("fdatmdomain out : " + fdatmdomain_out) + logger.info("fdatmdomain_in : " + fdatmdomain_in) + logger.info("fdatmdomain out : " + fdatmdomain_out) # -- Create CTSM domain file if create_domain: @@ -601,13 +605,18 @@ def main(): single_point.dir_output_datm = dir_output_datm single_point.create_datm_at_point() - logging.info("Successfully ran script for single point.") + if create_domain or create_surfdata or create_landuse or create_datm: + logger.info("Successfully ran script for single point.") + else: + logger.warning( + "Please 
choose at least one of the following flags to create the files: \n --create-domain \n --create-surface \n --create-landuse \n --create-datm" + ) elif args.run_type == "region": - logging.info( + logger.info( "----------------------------------------------------------------------------" ) - logging.info( + logger.info( "This script extracts a single point from the global CTSM inputdata datasets." ) @@ -651,14 +660,14 @@ def main(): region.create_tag() - logging.debug(region) + logger.debug(region) if crop_flag: num_pft = "78" else: num_pft = "16" - logging.debug("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) + logger.debug("crop_flag = " + crop_flag.__str__() + " => num_pft =" + num_pft) # -- Set input and output filenames # -- Specify input and output directories @@ -681,37 +690,33 @@ def main(): region.fdomain_in = fdomain_in region.fdomain_out = fdomain_out - logging.info("fdomain_in : " + fdomain_in) - logging.info("fdomain_out : " + fdomain_out) + logger.info("fdomain_in : " + fdomain_in) + logger.info("fdomain_out : " + fdomain_out) # -- Specify surface data file -------------------------------- fsurf_in = os.path.join( dir_inputdata, "lnd/clm2/surfdata_map/surfdata_1.9x2.5_78pfts_CMIP6_simyr1850_c170824.nc", ) - fsurf_out = os.path.join( - dir_output, add_tag_to_filename(fsurf_in, region.tag) - ) + fsurf_out = os.path.join(dir_output, add_tag_to_filename(fsurf_in, region.tag)) region.fsurf_in = fsurf_in region.fsurf_out = fsurf_out - logging.info("fsurf_in : " + fdomain_in) - logging.info("fsurf_out : " + fdomain_out) + logger.info("fsurf_in : " + fdomain_in) + logger.info("fsurf_out : " + fdomain_out) # -- Specify landuse file ------------------------------------- fluse_in = os.path.join( dir_inputdata, "lnd/clm2/surfdata_map/landuse.timeseries_1.9x2.5_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc", ) - fluse_out = os.path.join( - dir_output, add_tag_to_filename(fluse_in, region.tag) - ) + fluse_out = os.path.join(dir_output, add_tag_to_filename(fluse_in, region.tag)) region.fluse_in = fluse_in region.fluse_out = fluse_out - logging.info("fluse_in : " + fdomain_in) - logging.info("fluse_out : " + fdomain_out) + logger.info("fluse_in : " + fdomain_in) + logger.info("fluse_out : " + fdomain_out) # -- Create CTSM domain file if create_domain: @@ -724,7 +729,12 @@ def main(): # -- Create CTSM transient landuse data file if create_landuse: region.create_landuse_at_reg() - logging.info("Successfully ran script for a regional case.") + if create_domain or create_surfdata or create_landuse or create_datm: + logger.info("Successfully ran script for a regional case.") + else: + logger.warning( + "Please choose at least one of the following flags to create the files: \n --create-domain \n --create-surface \n --create-landuse \n --create-mesh" + ) else: # print help when no option is chosen From 81b051c940feedbd625e24110995a28b80832e10 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 16 Dec 2021 15:02:11 -0700 Subject: [PATCH 065/223] update the docstrings for clarifications. 
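
The clarified docstrings all describe the same recipe: build 1-d lon/lat
coordinate variables, find the indices that fall inside the requested box
with numpy, slice the dataset with isel(), and write the result back out
as NETCDF3_64BIT. A minimal, self-contained sketch of that pattern (the
toy dataset below is illustrative only, not a real CTSM input file):

    import numpy as np
    import xarray as xr

    f_in = xr.Dataset(
        {"mask": (("nj", "ni"), np.ones((4, 4)))},
        coords={"lon": ("ni", np.array([0.0, 90.0, 180.0, 270.0])),
                "lat": ("nj", np.array([-60.0, -20.0, 20.0, 60.0]))},
    )
    lon1, lon2, lat1, lat2 = 80.0, 275.0, -30.0, 30.0

    # subset the 1-d coordinate arrays, as in create_domain_at_reg
    xind = np.where((f_in["lon"] >= lon1) & (f_in["lon"] <= lon2))[0]
    yind = np.where((f_in["lat"] >= lat1) & (f_in["lat"] <= lat2))[0]
    f_out = f_in.isel(ni=xind, nj=yind)

    # 'NETCDF3_64BIT' mirrors the output format these methods now use
    f_out.to_netcdf(path="subset.nc", mode="w", format="NETCDF3_64BIT")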
--- .../ctsm/site_and_regional/regional_case.py | 44 +++++++++++++++---- python/ctsm/subset_data.py | 5 +++ tools/site_and_regional/subset_data | 12 ++--- 3 files changed, 46 insertions(+), 15 deletions(-) diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 5ad6874f81..64322872f9 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -71,7 +71,7 @@ def __init__( create_datm, ): """ - Initializes SinglePointCase with the given arguments. + Initializes RegionalCase with the given arguments. """ super().__init__(create_domain, create_surfdata, create_landuse, create_datm) self.lat1 = lat1 @@ -81,6 +81,11 @@ def __init__( self.reg_name = reg_name def create_tag(self): + """ + Create a tag for a region which is either the region name + or + the lat1-lat2_lon1-lon2 if the region name does not exist. + """ if self.reg_name: self.tag = self.reg_name else: @@ -95,12 +100,19 @@ def create_tag(self): ) def create_domain_at_reg(self): - # logging.debug ("Creating domain file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + """ + Create domain file for a region. + """ + logger.info( + "----------------------------------------------------------------------" + ) logger.info("Creating domain file at region:" + self.tag) + # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(self.fdomain_in, "xc", "yc", "ni", "nj") lat = f_in["lat"] lon = f_in["lon"] + # subset longitude and latitude arrays xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] @@ -112,19 +124,26 @@ def create_domain_at_reg(self): wfile = self.fdomain_out # mode 'w' overwrites file - f_out.to_netcdf(path=wfile, mode="w") + f_out.to_netcdf(path=wfile, mode="w", format='NETCDF3_64BIT') logger.info("Successfully created file (fdomain_out)" + self.fdomain_out) f_in.close() f_out.close() def create_surfdata_at_reg(self): - # logging.debug ("Creating surface dataset file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + """ + Create surface dataset for a region. + """ + logger.info( + "----------------------------------------------------------------------" + ) logger.info("Creating surface dataset file at region:" + self.tag) + # create 1d coordinate variables to enable sel() method filename = self.fsurf_in f_in = self.create_1d_coord(filename, "LONGXY", "LATIXY", "lsmlon", "lsmlat") lat = f_in["lat"] lon = f_in["lon"] + # subset longitude and latitude arrays xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] @@ -135,21 +154,28 @@ def create_surfdata_at_reg(self): f_out.attrs["Created_from"] = self.fsurf_in # mode 'w' overwrites file - f_out.to_netcdf(path=self.fsurf_out, mode="w") - logger.info("created file (fsurf_out)" + self.fsurf_out) - # f1.close(); + f_out.to_netcdf(path=self.fsurf_out, mode="w", format='NETCDF3_64BIT') + logger.info("Successfully created file (fsurf_out)" + self.fsurf_out) + f_in.close() f_out.close() def create_landuse_at_reg(self): - # logging.debug ("Creating landuse file at region"+ self.lon1.__str__()+"-"+self.lat2.__str__()+" "+self.lat1.__str__()+"-"+self.lat2.__str__()) + """ + Create landuse file for a region. 
+ """ + logger.info( + "----------------------------------------------------------------------" + ) logger.info("Creating landuse file at region:" + self.tag) + # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord( self.fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat" ) lat = f_in["lat"] lon = f_in["lon"] + # subset longitude and latitude arrays xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] @@ -161,7 +187,7 @@ def create_landuse_at_reg(self): wfile = self.fluse_out # mode 'w' overwrites file - f_out.to_netcdf(path=wfile, mode="w") + f_out.to_netcdf(path=wfile, mode="w", format='NETCDF3_64BIT') logger.info("Successfully created file (fdomain_out)" + self.fdomain_out) f_in.close() f_out.close() diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 80ad979cdd..5e34eda039 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -55,6 +55,11 @@ By default, it only extracts surface dataset and for extracting other files, the appropriate flags should be used. + +To run this script the following packages are required: + - numpy + - xarray + ------------------------------------------------------------------- To run the script for a single point: ./subset_data.py point diff --git a/tools/site_and_regional/subset_data b/tools/site_and_regional/subset_data index dcfc957d5a..eaef35e3b8 100755 --- a/tools/site_and_regional/subset_data +++ b/tools/site_and_regional/subset_data @@ -11,12 +11,12 @@ please check python/ctsm/subset_data.py file. This script extracts domain files, surface dataset, and DATM files at either a single point or a region using the global dataset. - - -To see all available options for single-point subsetting: - ./subset_data point --help -To see all available options for region subsetting: - ./subset_data region --help +To run this script the following packages are required: + - numpy + - xarray +---------------------------------------------------------------- +To see all available options for single-point/regional subsetting: + ./subset_data --help """ import os From 00dd1c95e37da3203ff25fba954c220ad01a2ca7 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 16 Dec 2021 15:05:43 -0700 Subject: [PATCH 066/223] minor change --- python/ctsm/subset_data.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 5e34eda039..14f8fe9c6e 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -74,7 +74,6 @@ # TODO [NS]: # -[] Automatic downloading of missing files if they are missing -# default 78 pft vs 16 pft # -- Import libraries @@ -377,7 +376,7 @@ def plat_type(x): x(str): latitude Raises: - Error when x (latitude) is not between -90 and 90. + Error: ArgumentTypeError when x (latitude) is not between -90 and 90. Returns: x (float): latitude in float @@ -401,16 +400,14 @@ def plon_type(x): x (str): longitude Raises: - Error: when longitude is <-180 and >360. + Error: ArgumentTypeError when longitude is <-180 and >360. Returns: x(float): converted longitude between 0 and 360 """ x = float(x) if (-180 < x) and (x < 0): - print("lon is :", x) x = x % 360 - print("after modulo lon is :", x) if (x < 0) or (x > 360): raise argparse.ArgumentTypeError( "ERROR: Longitude of single point should be between 0 and 360 or -180 and 180." 
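
A note on the plon_type hunk above: the conversion is now silent, but the
arithmetic is unchanged; a longitude of -105.0 maps to -105.0 % 360 = 255.0.
An illustrative session (a sketch, not captured output):

    >>> plon_type(-105.0)   # western hemisphere -> 0..360 convention
    255.0
    >>> plon_type(255.0)    # already in range, returned unchanged
    255.0
    >>> plon_type(-200.0)   # outside both conventions
    Traceback (most recent call last):
      ...
    argparse.ArgumentTypeError: ERROR: Longitude of single point should be between 0 and 360 or -180 and 180.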
From 2b70310002195adad8959084763f08151b24e19b Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 16 Dec 2021 15:17:16 -0700 Subject: [PATCH 067/223] update git_utils to point to ctsm --- python/ctsm/git_utils.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py index f73dcebb0e..2da254f2f3 100644 --- a/python/ctsm/git_utils.py +++ b/python/ctsm/git_utils.py @@ -2,6 +2,7 @@ import logging import subprocess +from ctsm.path_utils import path_to_ctsm_root logger = logging.getLogger(__name__) @@ -10,7 +11,8 @@ def get_git_short_hash(): """ Returns Git short SHA for the currect directory. """ - sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode() + sha = subprocess.check_output(['git', '-C', path_to_ctsm_root(), + 'rev-parse', '--short', 'HEAD']).strip().decode() return sha @@ -18,12 +20,12 @@ def get_git_long_hash(): """ Returns Git long SHA for the currect directory. """ - sha = subprocess.check_output(["git", "rev-parse", "HEAD"]).strip().decode() + sha = subprocess.check_output(["git", '-C', path_to_ctsm_root(), "rev-parse", "HEAD"]).strip().decode() return sha def get_git_describe(): """ Returns git describe output """ - label = subprocess.check_output(["git", "describe"]).strip() - return label.decode() + label = subprocess.check_output(["git", "describe"]).strip().decode() + return label From 3cc9fb9105230da94dc514f204803638051eda7c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 16 Dec 2021 15:17:55 -0700 Subject: [PATCH 068/223] remove these options for now. --- python/ctsm/subset_data.py | 23 +---------------------- 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 14f8fe9c6e..a7a1cf6b2b 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -333,28 +333,6 @@ def get_parser(): required=False, default=False, ) - pt_parser.add_argument( - "--create_user_mods", - help="Flag for creating user mods directory . [default: %(default)s]", - action="store", - dest="datm_tower", - type=str2bool, - nargs="?", - const=True, - required=False, - default=False, - ) - pt_parser.add_argument( - "--user_mods_dir", - help="Flag for creating user mods directory . [default: %(default)s]", - action="store", - dest="user_mod_dir", - type=str, - nargs="?", - const=True, - required=False, - default=False, - ) # -- print help for both subparsers parser.epilog = textwrap.dedent( @@ -731,6 +709,7 @@ def main(): # -- Create CTSM transient landuse data file if create_landuse: region.create_landuse_at_reg() + if create_domain or create_surfdata or create_landuse or create_datm: logger.info("Successfully ran script for a regional case.") else: From 9d443d2a02d7d2f376c38bfe89cd7f7c498857a1 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Thu, 16 Dec 2021 22:35:55 -0700 Subject: [PATCH 069/223] fixing typos --- python/ctsm/subset_data.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 0884c7e897..1fbb224d8f 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -133,7 +133,7 @@ def get_parser(): ) pt_parser.add_argument( "--variable-snow-fraction", - help="Creating datasets using variable snow fraction. If unset, snow is set to uniform " + help="Create surface data with variable snow fraction. 
If unset, snow is set to uniform " "fraction.", action="store_false", dest="uni_snow", @@ -142,7 +142,7 @@ def get_parser(): ) pt_parser.add_argument( "--allow-multiple-pft", - help="Creating dataset using multiple pft. If unset, it assumes the whole grid is 100% " + help="Create surface data with multiple PFTs. If unset, it assumes the whole grid is 100% " "single PFT set by --dom-pft.", action="store_false", dest="overwrite_single_pft", @@ -151,7 +151,7 @@ def get_parser(): ) pt_parser.add_argument( "--zero-nonveg", - help="Creating dataset by setting all non-vegetation landunits to zero.", + help="Set all non-vegetation landunits in the surface data to zero.", action="store_true", dest="zero_nonveg", required=False, @@ -159,7 +159,7 @@ def get_parser(): ) pt_parser.add_argument( "--allow-saturation-excess", - help="Creating dataset allowing saturatated conditions. If unset saturation_excess is set " + help="Create surface data allowing saturated conditions. If unset saturation excess is set " "to zero.", action="store_true", dest="saturation_excess", @@ -168,7 +168,7 @@ def get_parser(): ) pt_parser.add_argument( "--dompft", - help="Dominant PFT type if we set the grid to 100% one PFT [default: %(default)s].", + help="Dominant PFT if we set the grid to 100% one PFT [default: %(default)s].", action="store", dest="dom_pft", type=int, @@ -393,6 +393,7 @@ def setup_user_mods(user_mods_dir, cesmroot): for line in base_file: user_file.write(line) + def setup_files(args, defaults, cesmroot): """ Sets up the files and folders needed for this program From d6268b7e6fad3478deca49f4fbc949eef89a7549 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Sat, 18 Dec 2021 08:47:38 -0700 Subject: [PATCH 070/223] fixing issues from merge --- python/ctsm/site_and_regional/base_case.py | 3 +- python/ctsm/subset_data.py | 26 +++++++++++++---- subset_data_regional/user_mods/user_nl_clm | 20 +++++++++++++ .../user_mods/user_nl_datm_streams | 23 +++++++++++++++ .../user_mods/shell_commands | 10 +++++++ .../user_mods/user_nl_clm | 22 +++++++++++++++ .../user_mods/user_nl_datm_streams | 23 +++++++++++++++ tools/site_and_regional/data_afoster.cfg | 28 +++++++++++++++++++ 8 files changed, 147 insertions(+), 8 deletions(-) create mode 100644 subset_data_regional/user_mods/user_nl_clm create mode 100644 subset_data_regional/user_mods/user_nl_datm_streams create mode 100644 subset_data_single_point/user_mods/shell_commands create mode 100644 subset_data_single_point/user_mods/user_nl_clm create mode 100644 subset_data_single_point/user_mods/user_nl_datm_streams create mode 100644 tools/site_and_regional/data_afoster.cfg diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index dab157748c..45deb027ae 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -162,8 +162,7 @@ def update_metadata(nc_file): "source_code", "SVN_url", "hostname", - "history", - "History_Log", + "history History_Log", "Logname", "Host", "Version", diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 1fbb224d8f..6cc7279ca0 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -79,7 +79,7 @@ _CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", 'python')) sys.path.insert(1, _CTSM_PYTHON) -DEFAULTS_FILE = "default_data.cfg" +DEFAULTS_FILE = "data_afoster.cfg" logger = logging.getLogger(__name__) @@ -142,7 +142,7 @@ def get_parser(): ) pt_parser.add_argument( 
"--allow-multiple-pft", - help="Create surface data with multiple PFTs. If unset, it assumes the whole grid is 100% " + help="Create surface data with multiple PFTs. If unset, it assumes the whole grid is 100%% " "single PFT set by --dom-pft.", action="store_false", dest="overwrite_single_pft", @@ -168,7 +168,7 @@ def get_parser(): ) pt_parser.add_argument( "--dompft", - help="Dominant PFT if we set the grid to 100% one PFT [default: %(default)s].", + help="Dominant PFT if we set the grid to 100%% one PFT [default: %(default)s].", action="store", dest="dom_pft", type=int, @@ -230,7 +230,7 @@ def get_parser(): ) rg_parser.add_argument( "--create-mesh", - help="Flag for subsetting mesh file.", + help="Subset a mesh file for a region.", action="store_true", dest="create_mesh", required=False, @@ -321,6 +321,14 @@ def get_parser(): type=str, default=os.path.join(os.getcwd(), "subset_data_" + parser_name), ) + subparser.add_argument( + "--user-mods-dir", + help="User mods directory.", + action="store", + dest="user_mods_dir", + type=str, + default="", + ) # -- print help for both subparsers parser.epilog = textwrap.dedent( @@ -586,7 +594,13 @@ def main(): # print help and exit when no option is chosen if args.run_type != "point" and args.run_type != "region": - get_parser().print_help() + print("Must supply a positional argument: 'point' or 'region'.") + print("See ./subset_data --help for more help.") + sys.exit() + if not any([args.create_surfdata, args.create_domain, args.create_landuse, args.create_datm]): + print("Must supply one of:") + print(" --create-surface \n --create-landuse \n --create-datm \n --create-domain") + print("See ./subset_data --help for more help.") sys.exit() # create files and folders necessary and return dictionary of file/folder locations @@ -594,5 +608,5 @@ def main(): if args.run_type == "point": subset_point(args, file_dict) - elif args.run_type == "reg": + elif args.run_type == "region": subset_region(args, file_dict) diff --git a/subset_data_regional/user_mods/user_nl_clm b/subset_data_regional/user_mods/user_nl_clm new file mode 100644 index 0000000000..47865671a2 --- /dev/null +++ b/subset_data_regional/user_mods/user_nl_clm @@ -0,0 +1,20 @@ +!---------------------------------------------------------------------------------- +! Users should add all user specific namelist changes below in the form of +! namelist_var = new_namelist_value +! +! EXCEPTIONS: +! Set use_cndv by the compset you use and the CLM_BLDNML_OPTS -dynamic_vegetation setting +! Set use_vichydro by the compset you use and the CLM_BLDNML_OPTS -vichydro setting +! Set use_cn by the compset you use and CLM_BLDNML_OPTS -bgc setting +! Set use_crop by the compset you use and CLM_BLDNML_OPTS -crop setting +! Set spinup_state by the CLM_BLDNML_OPTS -bgc_spinup setting +! Set co2_ppmv with CCSM_CO2_PPMV option +! Set fatmlndfrc with LND_DOMAIN_PATH/LND_DOMAIN_FILE options +! Set finidat with RUN_REFCASE/RUN_REFDATE/RUN_REFTOD options for hybrid or branch cases +! (includes $inst_string for multi-ensemble cases) +! or with CLM_FORCE_COLDSTART to do a cold start +! or set it with an explicit filename here. +! Set maxpatch_glc with GLC_NEC option +! 
Set glc_do_dynglacier with GLC_TWO_WAY_COUPLING env variable +!---------------------------------------------------------------------------------- + diff --git a/subset_data_regional/user_mods/user_nl_datm_streams b/subset_data_regional/user_mods/user_nl_datm_streams new file mode 100644 index 0000000000..6472afeb8b --- /dev/null +++ b/subset_data_regional/user_mods/user_nl_datm_streams @@ -0,0 +1,23 @@ +!------------------------------------------------------------------------ +! This file is used to modify datm.streams.xml generated in $RUNDIR +! Entries should have the form +! :<= new stream_value> +! The following are accepted values for an assume streamname of foo +! foo:meshfile = character string +! foo:datafiles = comma separated string of full pathnames (e.g. file1,file2,file3...) +! foo:datavars = comma separated string of field pairs (e.g. foo foobar,foo2 foobar2...) +! foo:taxmode = one of [cycle, extend, limit] +! foo:tintalgo = one of [lower,upper,nearest,linear,coszen] +! foo:readmode = single (only suported mode right now) +! foo:mapalgo = one of [bilinear,redist,nn,consf,consd,none] +! foo:dtlimit = real (1.5 is default) +! foo:year_first = integer +! foo:year_last = integer +! foo:year_align = integer +! foo:vectors = one of [none,u:v] +! foo:lev_dimname: = one of [null,name of level dimenion name] +! foo:offset = integer +! As an example: +! foo:year_first = 1950 +! would change the stream year_first stream_entry to 1950 for the foo stream block +!------------------------------------------------------------------------ diff --git a/subset_data_single_point/user_mods/shell_commands b/subset_data_single_point/user_mods/shell_commands new file mode 100644 index 0000000000..ff95a31d75 --- /dev/null +++ b/subset_data_single_point/user_mods/shell_commands @@ -0,0 +1,10 @@ + +# Change below line if you move the subset data directory + +./xmlchange CLM_USRDAT_DIR=/Users/afoster/Documents/ctsm/ctsm_fates/subset_data_single_point + +./xmlchange PTS_LON=287.5 + +./xmlchange PTS_LAT=42.87958115183244 + +./xmlchange MPILIB=mpi-serial diff --git a/subset_data_single_point/user_mods/user_nl_clm b/subset_data_single_point/user_mods/user_nl_clm new file mode 100644 index 0000000000..0750d52454 --- /dev/null +++ b/subset_data_single_point/user_mods/user_nl_clm @@ -0,0 +1,22 @@ +!---------------------------------------------------------------------------------- +! Users should add all user specific namelist changes below in the form of +! namelist_var = new_namelist_value +! +! EXCEPTIONS: +! Set use_cndv by the compset you use and the CLM_BLDNML_OPTS -dynamic_vegetation setting +! Set use_vichydro by the compset you use and the CLM_BLDNML_OPTS -vichydro setting +! Set use_cn by the compset you use and CLM_BLDNML_OPTS -bgc setting +! Set use_crop by the compset you use and CLM_BLDNML_OPTS -crop setting +! Set spinup_state by the CLM_BLDNML_OPTS -bgc_spinup setting +! Set co2_ppmv with CCSM_CO2_PPMV option +! Set fatmlndfrc with LND_DOMAIN_PATH/LND_DOMAIN_FILE options +! Set finidat with RUN_REFCASE/RUN_REFDATE/RUN_REFTOD options for hybrid or branch cases +! (includes $inst_string for multi-ensemble cases) +! or with CLM_FORCE_COLDSTART to do a cold start +! or set it with an explicit filename here. +! Set maxpatch_glc with GLC_NEC option +! 
Set glc_do_dynglacier with GLC_TWO_WAY_COUPLING env variable +!---------------------------------------------------------------------------------- + + +fsurdat = '$CLM_USRDAT_DIR/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_287.8_42.5_c211218.nc' diff --git a/subset_data_single_point/user_mods/user_nl_datm_streams b/subset_data_single_point/user_mods/user_nl_datm_streams new file mode 100644 index 0000000000..6472afeb8b --- /dev/null +++ b/subset_data_single_point/user_mods/user_nl_datm_streams @@ -0,0 +1,23 @@ +!------------------------------------------------------------------------ +! This file is used to modify datm.streams.xml generated in $RUNDIR +! Entries should have the form +! :<= new stream_value> +! The following are accepted values for an assume streamname of foo +! foo:meshfile = character string +! foo:datafiles = comma separated string of full pathnames (e.g. file1,file2,file3...) +! foo:datavars = comma separated string of field pairs (e.g. foo foobar,foo2 foobar2...) +! foo:taxmode = one of [cycle, extend, limit] +! foo:tintalgo = one of [lower,upper,nearest,linear,coszen] +! foo:readmode = single (only suported mode right now) +! foo:mapalgo = one of [bilinear,redist,nn,consf,consd,none] +! foo:dtlimit = real (1.5 is default) +! foo:year_first = integer +! foo:year_last = integer +! foo:year_align = integer +! foo:vectors = one of [none,u:v] +! foo:lev_dimname: = one of [null,name of level dimenion name] +! foo:offset = integer +! As an example: +! foo:year_first = 1950 +! would change the stream year_first stream_entry to 1950 for the foo stream block +!------------------------------------------------------------------------ diff --git a/tools/site_and_regional/data_afoster.cfg b/tools/site_and_regional/data_afoster.cfg new file mode 100644 index 0000000000..0fde22b129 --- /dev/null +++ b/tools/site_and_regional/data_afoster.cfg @@ -0,0 +1,28 @@ +[main] +clmforcingindir = /Users/afoster/Documents/ctsm + +[datm_gswp3] +dir = /glade/p/cgd/tss/CTSM_datm_forcing_data/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 +domain = domain.lnd.360x720_gswp3.0v1.c170606.nc +solardir = Solar +precdir = Precip +tpqwdir = TPHWL +solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. +prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. +tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. 
+solarname = CLMGSWP3v1.Solar +precname = CLMGSWP3v1.Precip +tpqwname = CLMGSWP3v1.TPQW + +[surfdat] +dir = surfdata +surfdat_16pft = surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc +surfdat_78pft = surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc + +[landuse] +dir = lnd/clm2/surfdata_map/release-clm5.0.18 +landuse_16pft = landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc +landuse_78pft = landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc + +[domain] +file = domain.lnd.fv0.9x1.25_gx1v7.151020.nc From 9998e6d4256ef754b1ecc357d9dc8d49d10d5c52 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Sat, 18 Dec 2021 09:05:49 -0700 Subject: [PATCH 071/223] remove folders accidentally pushed --- python/ctsm/subset_data.py | 4 ++-- subset_data_regional/user_mods/user_nl_clm | 20 ---------------- .../user_mods/user_nl_datm_streams | 23 ------------------- .../user_mods/shell_commands | 10 -------- .../user_mods/user_nl_clm | 22 ------------------ .../user_mods/user_nl_datm_streams | 23 ------------------- 6 files changed, 2 insertions(+), 100 deletions(-) delete mode 100644 subset_data_regional/user_mods/user_nl_clm delete mode 100644 subset_data_regional/user_mods/user_nl_datm_streams delete mode 100644 subset_data_single_point/user_mods/shell_commands delete mode 100644 subset_data_single_point/user_mods/user_nl_clm delete mode 100644 subset_data_single_point/user_mods/user_nl_datm_streams diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 6cc7279ca0..cd1747fcb0 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -79,7 +79,7 @@ _CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", 'python')) sys.path.insert(1, _CTSM_PYTHON) -DEFAULTS_FILE = "data_afoster.cfg" +DEFAULTS_FILE = "default_data.cfg" logger = logging.getLogger(__name__) @@ -581,7 +581,7 @@ def main(): # parse defaults file cesmroot = path_to_ctsm_root() defaults = configparser.ConfigParser() - defaults.read(os.path.join(cesmroot, 'tools/site_and_regional', DEFAULTS_FILE)) + defaults.read(os.path.join(cesmroot, "tools/site_and_regional", DEFAULTS_FILE)) # --------------------------------- # diff --git a/subset_data_regional/user_mods/user_nl_clm b/subset_data_regional/user_mods/user_nl_clm deleted file mode 100644 index 47865671a2..0000000000 --- a/subset_data_regional/user_mods/user_nl_clm +++ /dev/null @@ -1,20 +0,0 @@ -!---------------------------------------------------------------------------------- -! Users should add all user specific namelist changes below in the form of -! namelist_var = new_namelist_value -! -! EXCEPTIONS: -! Set use_cndv by the compset you use and the CLM_BLDNML_OPTS -dynamic_vegetation setting -! Set use_vichydro by the compset you use and the CLM_BLDNML_OPTS -vichydro setting -! Set use_cn by the compset you use and CLM_BLDNML_OPTS -bgc setting -! Set use_crop by the compset you use and CLM_BLDNML_OPTS -crop setting -! Set spinup_state by the CLM_BLDNML_OPTS -bgc_spinup setting -! Set co2_ppmv with CCSM_CO2_PPMV option -! Set fatmlndfrc with LND_DOMAIN_PATH/LND_DOMAIN_FILE options -! Set finidat with RUN_REFCASE/RUN_REFDATE/RUN_REFTOD options for hybrid or branch cases -! (includes $inst_string for multi-ensemble cases) -! or with CLM_FORCE_COLDSTART to do a cold start -! or set it with an explicit filename here. -! Set maxpatch_glc with GLC_NEC option -! 
Set glc_do_dynglacier with GLC_TWO_WAY_COUPLING env variable -!---------------------------------------------------------------------------------- - diff --git a/subset_data_regional/user_mods/user_nl_datm_streams b/subset_data_regional/user_mods/user_nl_datm_streams deleted file mode 100644 index 6472afeb8b..0000000000 --- a/subset_data_regional/user_mods/user_nl_datm_streams +++ /dev/null @@ -1,23 +0,0 @@ -!------------------------------------------------------------------------ -! This file is used to modify datm.streams.xml generated in $RUNDIR -! Entries should have the form -! :<= new stream_value> -! The following are accepted values for an assume streamname of foo -! foo:meshfile = character string -! foo:datafiles = comma separated string of full pathnames (e.g. file1,file2,file3...) -! foo:datavars = comma separated string of field pairs (e.g. foo foobar,foo2 foobar2...) -! foo:taxmode = one of [cycle, extend, limit] -! foo:tintalgo = one of [lower,upper,nearest,linear,coszen] -! foo:readmode = single (only suported mode right now) -! foo:mapalgo = one of [bilinear,redist,nn,consf,consd,none] -! foo:dtlimit = real (1.5 is default) -! foo:year_first = integer -! foo:year_last = integer -! foo:year_align = integer -! foo:vectors = one of [none,u:v] -! foo:lev_dimname: = one of [null,name of level dimenion name] -! foo:offset = integer -! As an example: -! foo:year_first = 1950 -! would change the stream year_first stream_entry to 1950 for the foo stream block -!------------------------------------------------------------------------ diff --git a/subset_data_single_point/user_mods/shell_commands b/subset_data_single_point/user_mods/shell_commands deleted file mode 100644 index ff95a31d75..0000000000 --- a/subset_data_single_point/user_mods/shell_commands +++ /dev/null @@ -1,10 +0,0 @@ - -# Change below line if you move the subset data directory - -./xmlchange CLM_USRDAT_DIR=/Users/afoster/Documents/ctsm/ctsm_fates/subset_data_single_point - -./xmlchange PTS_LON=287.5 - -./xmlchange PTS_LAT=42.87958115183244 - -./xmlchange MPILIB=mpi-serial diff --git a/subset_data_single_point/user_mods/user_nl_clm b/subset_data_single_point/user_mods/user_nl_clm deleted file mode 100644 index 0750d52454..0000000000 --- a/subset_data_single_point/user_mods/user_nl_clm +++ /dev/null @@ -1,22 +0,0 @@ -!---------------------------------------------------------------------------------- -! Users should add all user specific namelist changes below in the form of -! namelist_var = new_namelist_value -! -! EXCEPTIONS: -! Set use_cndv by the compset you use and the CLM_BLDNML_OPTS -dynamic_vegetation setting -! Set use_vichydro by the compset you use and the CLM_BLDNML_OPTS -vichydro setting -! Set use_cn by the compset you use and CLM_BLDNML_OPTS -bgc setting -! Set use_crop by the compset you use and CLM_BLDNML_OPTS -crop setting -! Set spinup_state by the CLM_BLDNML_OPTS -bgc_spinup setting -! Set co2_ppmv with CCSM_CO2_PPMV option -! Set fatmlndfrc with LND_DOMAIN_PATH/LND_DOMAIN_FILE options -! Set finidat with RUN_REFCASE/RUN_REFDATE/RUN_REFTOD options for hybrid or branch cases -! (includes $inst_string for multi-ensemble cases) -! or with CLM_FORCE_COLDSTART to do a cold start -! or set it with an explicit filename here. -! Set maxpatch_glc with GLC_NEC option -! 
Set glc_do_dynglacier with GLC_TWO_WAY_COUPLING env variable -!---------------------------------------------------------------------------------- - - -fsurdat = '$CLM_USRDAT_DIR/surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_287.8_42.5_c211218.nc' diff --git a/subset_data_single_point/user_mods/user_nl_datm_streams b/subset_data_single_point/user_mods/user_nl_datm_streams deleted file mode 100644 index 6472afeb8b..0000000000 --- a/subset_data_single_point/user_mods/user_nl_datm_streams +++ /dev/null @@ -1,23 +0,0 @@ -!------------------------------------------------------------------------ -! This file is used to modify datm.streams.xml generated in $RUNDIR -! Entries should have the form -! :<= new stream_value> -! The following are accepted values for an assume streamname of foo -! foo:meshfile = character string -! foo:datafiles = comma separated string of full pathnames (e.g. file1,file2,file3...) -! foo:datavars = comma separated string of field pairs (e.g. foo foobar,foo2 foobar2...) -! foo:taxmode = one of [cycle, extend, limit] -! foo:tintalgo = one of [lower,upper,nearest,linear,coszen] -! foo:readmode = single (only suported mode right now) -! foo:mapalgo = one of [bilinear,redist,nn,consf,consd,none] -! foo:dtlimit = real (1.5 is default) -! foo:year_first = integer -! foo:year_last = integer -! foo:year_align = integer -! foo:vectors = one of [none,u:v] -! foo:lev_dimname: = one of [null,name of level dimenion name] -! foo:offset = integer -! As an example: -! foo:year_first = 1950 -! would change the stream year_first stream_entry to 1950 for the foo stream block -!------------------------------------------------------------------------ From 289d3a771ee8656a8666dad1e0463f43f0e3ead4 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Sat, 18 Dec 2021 09:07:13 -0700 Subject: [PATCH 072/223] remove personal defaults file --- tools/site_and_regional/data_afoster.cfg | 28 ------------------------ 1 file changed, 28 deletions(-) delete mode 100644 tools/site_and_regional/data_afoster.cfg diff --git a/tools/site_and_regional/data_afoster.cfg b/tools/site_and_regional/data_afoster.cfg deleted file mode 100644 index 0fde22b129..0000000000 --- a/tools/site_and_regional/data_afoster.cfg +++ /dev/null @@ -1,28 +0,0 @@ -[main] -clmforcingindir = /Users/afoster/Documents/ctsm - -[datm_gswp3] -dir = /glade/p/cgd/tss/CTSM_datm_forcing_data/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 -domain = domain.lnd.360x720_gswp3.0v1.c170606.nc -solardir = Solar -precdir = Precip -tpqwdir = TPHWL -solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. -prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. -tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. 
-solarname = CLMGSWP3v1.Solar
-precname = CLMGSWP3v1.Precip
-tpqwname = CLMGSWP3v1.TPQW
-
-[surfdat]
-dir = surfdata
-surfdat_16pft = surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc
-surfdat_78pft = surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc
-
-[landuse]
-dir = lnd/clm2/surfdata_map/release-clm5.0.18
-landuse_16pft = landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc
-landuse_78pft = landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc
-
-[domain]
-file = domain.lnd.fv0.9x1.25_gx1v7.151020.nc

From 2b309846805add306fd42b39b5b6416f377aad34 Mon Sep 17 00:00:00 2001
From: Adrianna Foster
Date: Tue, 21 Dec 2021 14:00:25 -0700
Subject: [PATCH 073/223] update fdomain file name

---
 python/ctsm/site_and_regional/single_point_case.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py
index 6fe0633c22..b73c5f4485 100644
--- a/python/ctsm/site_and_regional/single_point_case.py
+++ b/python/ctsm/site_and_regional/single_point_case.py
@@ -148,7 +148,7 @@ def create_domain_at_point(self, indir, file):
         f_out.attrs["Created_from"] = fdomain_in

         wfile = os.path.join(self.output_dir, fdomain_out)
-        f_out.to_netcdf(path=fdomain_out, mode="w", format="NETCDF3_64BIT")
+        f_out.to_netcdf(path=wfile, mode="w", format="NETCDF3_64BIT")
         logger.info("Successfully created file (fdomain_out) %s", wfile)
         f_in.close()
         f_out.close()

From e04bf2f947591bfc6888da11f1fa0abc4ff21baa Mon Sep 17 00:00:00 2001
From: Adrianna Foster
Date: Mon, 27 Dec 2021 09:39:59 -0700
Subject: [PATCH 074/223] remove duplicated argparse argument from merge

---
 python/ctsm/subset_data.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py
index d801b098de..cd1747fcb0 100644
--- a/python/ctsm/subset_data.py
+++ b/python/ctsm/subset_data.py
@@ -329,14 +329,6 @@ def get_parser():
         type=str,
         default="",
     )
-    pt_parser.add_argument(
-        "--datm-from-tower",
-        help="Create DATM forcing data at single point for a tower data.",
-        action="store_true",
-        dest="datm_tower",
-        required=False,
-        default=False,
-    )

     # -- print help for both subparsers
     parser.epilog = textwrap.dedent(

From 2e73d8ae737d34d3e0a6ce848e31d36c2eb2113c Mon Sep 17 00:00:00 2001
From: Adrianna Foster
Date: Mon, 27 Dec 2021 18:49:00 -0700
Subject: [PATCH 075/223] Update externals to point to new cdeps

---
 Externals.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Externals.cfg b/Externals.cfg
index 10963cfd20..a06d5851c9 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -48,7 +48,7 @@ local_path = components/cmeps
 required = True

 [cdeps]
-tag = cdeps0.12.32
+tag = cdeps0.12.34
 protocol = git
 repo_url = https://github.com/ESCOMP/CDEPS.git
 local_path = components/cdeps

From 7d4c22caf7b1808b84738fc4279a59c59c1ae17b Mon Sep 17 00:00:00 2001
From: Ryan Knox
Date: Fri, 31 Dec 2021 14:36:07 -0500
Subject: [PATCH 076/223] Cleaning up fates history interface

---
 src/utils/clmfates_interfaceMod.F90 | 39 ++++++-----------------------
 1 file changed, 8 insertions(+), 31 deletions(-)

diff --git a/src/utils/clmfates_interfaceMod.F90 b/src/utils/clmfates_interfaceMod.F90
index f3106e1415..4c578bd8ec 100644
--- a/src/utils/clmfates_interfaceMod.F90
+++ b/src/utils/clmfates_interfaceMod.F90
@@ -70,9 +70,9 @@ module CLMFatesInterfaceMod
    use clm_varpar , only : numrad
    use clm_varpar , only : ivis
    use clm_varpar , only : inir
-   use clm_varpar , only : nlevgrnd
    use clm_varpar , only : nlevdecomp
    use clm_varpar , only : nlevdecomp_full
+   use clm_varpar , only : nlevsoi
    use PhotosynthesisMod , only : photosyns_type
    use atm2lndType , only : atm2lnd_type
    use SurfaceAlbedoType , only : surfalb_type
@@ -122,7 +122,6 @@ module CLMFatesInterfaceMod
    use EDTypesMod , only : ed_patch_type
    use PRTGenericMod , only : num_elements
-   use FatesInterfaceTypesMod, only : hlm_numlevgrnd
    use FatesInterfaceTypesMod, only : hlm_stepsize
    use EDMainMod , only : ed_ecosystem_dynamics
    use EDMainMod , only : ed_update_site
@@ -286,7 +285,7 @@ subroutine CLMFatesGlobals()
       call set_fates_ctrlparms('vis_sw_index',ival=ivis)
       call set_fates_ctrlparms('nir_sw_index',ival=inir)
-      call set_fates_ctrlparms('num_lev_ground',ival=nlevgrnd)
+      call set_fates_ctrlparms('num_lev_soil',ival=nlevsoi)
       call set_fates_ctrlparms('hlm_name',cval='CLM')
       call set_fates_ctrlparms('hio_ignore_val',rval=spval)
       call set_fates_ctrlparms('soilwater_ipedof',ival=get_ipedof(0))
@@ -2571,8 +2570,7 @@ subroutine init_history_io(this,bounds_proc)
     use histFileMod, only : hist_addfld1d, hist_addfld2d, hist_addfld_decomp
     use FatesConstantsMod, only : fates_short_string_length, fates_long_string_length
-    use FatesIOVariableKindMod, only : patch_r8, patch_ground_r8, patch_size_pft_r8
-    use FatesIOVariableKindMod, only : site_r8, site_ground_r8, site_size_pft_r8
+    use FatesIOVariableKindMod, only : site_r8, site_soil_r8, site_size_pft_r8
     use FatesIOVariableKindMod, only : site_size_r8, site_pft_r8, site_age_r8
     use FatesIOVariableKindMod, only : site_coage_r8, site_coage_pft_r8
     use FatesIOVariableKindMod, only : site_fuel_r8, site_cwdsc_r8, site_scag_r8
@@ -2669,13 +2667,6 @@ subroutine init_history_io(this,bounds_proc)
          ioname = trim(fates_hist%dim_kinds(dk_index)%name)
          select case(trim(ioname))
-         case(patch_r8)
-            call hist_addfld1d(fname=trim(vname),units=trim(vunits), &
-                 avgflag=trim(vavgflag),long_name=trim(vlong), &
-                 ptr_patch=fates_hist%hvars(ivar)%r81d, &
-                 default=trim(vdefault), &
-                 set_lake=0._r8,set_urb=0._r8)
-
          case(site_r8)
            call hist_addfld1d(fname=trim(vname),units=trim(vunits), &
                 avgflag=trim(vavgflag),long_name=trim(vlong), &
                 ptr_gcell=fates_hist%hvars(ivar)%r81d, &
                 default=trim(vdefault), &
                 set_lake=0._r8,set_urb=0._r8)

-         case(patch_ground_r8, patch_size_pft_r8)
-
-            d_index = fates_hist%dim_kinds(dk_index)%dim2_index
-            dim2name = fates_hist%dim_bounds(d_index)%name
-            call hist_addfld2d(fname=trim(vname),units=trim(vunits), & ! <--- addfld2d
-                 type2d=trim(dim2name), & !
<--- type2d - avgflag=trim(vavgflag),long_name=trim(vlong), & - ptr_patch=fates_hist%hvars(ivar)%r82d, & - default=trim(vdefault)) - - - case(site_ground_r8, site_size_pft_r8, site_size_r8, site_pft_r8, & + case(site_soil_r8, site_size_pft_r8, site_size_r8, site_pft_r8, & site_age_r8, site_height_r8, site_coage_r8,site_coage_pft_r8, & site_fuel_r8, site_cwdsc_r8, & site_can_r8,site_cnlf_r8, site_cnlfpft_r8, site_scag_r8, & @@ -2991,7 +2971,7 @@ subroutine hlm_bounds_to_fates_bounds(hlm, fates) use FatesLitterMod, only : ncwd use EDtypesMod, only : nlevleaf, nclmax use FatesInterfaceTypesMod, only : numpft_fates => numpft - use clm_varpar, only : nlevgrnd + implicit none @@ -3003,14 +2983,11 @@ subroutine hlm_bounds_to_fates_bounds(hlm, fates) fates%cohort_begin = hlm%begcohort fates%cohort_end = hlm%endcohort - fates%patch_begin = hlm%begp - fates%patch_end = hlm%endp - fates%column_begin = hlm%begc fates%column_end = hlm%endc - - fates%ground_begin = 1 - fates%ground_end = nlevgrnd + + fates%soil_begin = 1 + fates%soil_end = nlevsoi fates%sizepft_class_begin = 1 fates%sizepft_class_end = nlevsclass * numpft_fates From c642aa5e4dd9b68ea9b372fbe54515402e98c831 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Mon, 3 Jan 2022 10:41:07 -0700 Subject: [PATCH 077/223] merge conflict resolved. --- tools/site_and_regional/subset_data | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/tools/site_and_regional/subset_data b/tools/site_and_regional/subset_data index 0e4550e281..680de27aff 100755 --- a/tools/site_and_regional/subset_data +++ b/tools/site_and_regional/subset_data @@ -9,20 +9,11 @@ For full instructions on how to run the code and different options, please check python/ctsm/subset_data.py file. This script extracts domain files, surface dataset, and DATM files at either a single point or a region using the global dataset. 
-<<<<<<< HEAD -To see all available options for single-point subsetting: - ./subset_data point --help -To see all available options for region subsetting: - ./subset_data region --help -||||||| a6f23ca69 - - To see all available options for single-point subsetting: ./subset_data point --help To see all available options for region subsetting: ./subset_data region --help -======= To run this script the following packages are required: - numpy @@ -30,7 +21,6 @@ To run this script the following packages are required: ---------------------------------------------------------------- To see all available options for single-point/regional subsetting: ./subset_data --help ->>>>>>> python_dev_meeting """ import os From cc34e12b5fc8eb1d82bd589e0187f43712e92cbc Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Mon, 3 Jan 2022 10:45:45 -0700 Subject: [PATCH 078/223] updates to ctsm_logging.py --- python/ctsm/ctsm_logging.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/python/ctsm/ctsm_logging.py b/python/ctsm/ctsm_logging.py index f129c200cf..79064b6887 100644 --- a/python/ctsm/ctsm_logging.py +++ b/python/ctsm/ctsm_logging.py @@ -31,6 +31,7 @@ logger = logging.getLogger(__name__) + def setup_logging_pre_config(): """Setup logging for a script / application @@ -41,12 +42,14 @@ def setup_logging_pre_config(): """ setup_logging(level=logging.WARNING) + def setup_logging_for_tests(enable_critical=False): """Setup logging as appropriate for unit tests""" setup_logging(level=logging.CRITICAL) if not enable_critical: logging.disable(logging.CRITICAL) + def setup_logging(level=logging.WARNING): """Setup logging for a script / application @@ -54,18 +57,24 @@ def setup_logging(level=logging.WARNING): do NOT intend to allow the user to control logging preferences via command-line arguments, so that all of the final logging options are set here. """ - logging.basicConfig(format='%(levelname)s: %(message)s', level=level) + logging.basicConfig(format="%(levelname)s: %(message)s", level=level) + def add_logging_args(parser): """Add common logging-related options to the argument parser""" logging_level = parser.add_mutually_exclusive_group() - logging_level.add_argument('-v', '--verbose', action='store_true', - help='Output extra logging info') + logging_level.add_argument( + "-v", "--verbose", action="store_true", help="Output extra logging info" + ) + + logging_level.add_argument( + "--debug", + action="store_true", + help="Output even more logging info for debugging", + ) - logging_level.add_argument('--debug', action='store_true', - help='Output even more logging info for debugging') def process_logging_args(args): """Configure logging based on the logging-related args added by add_logging_args""" @@ -78,12 +87,12 @@ def process_logging_args(args): else: root_logger.setLevel(logging.WARNING) -def output_to_file(filepath, message, log_to_logger=False): + +def output_to_file(file_path, message, log_to_logger=False): """ helper function to write to log file. """ - with open(filepath, 'a') as fl: + with open(file_path, "a") as fl: fl.write(message) if log_to_logger: logger.info(message) - From 0f09b38c744f4e736d640c5122ffbd7bc4d02ba8 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Mon, 3 Jan 2022 10:59:04 -0700 Subject: [PATCH 079/223] updates. 
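
Mostly black-style reformatting of python/ctsm/utils.py; no functional
change intended. For reference, a sketch of how the config helpers below
are driven (the section and option names here are invented for
illustration, not taken from a shipped .cfg file):

    import configparser
    from ctsm.utils import get_config_value

    config = configparser.ConfigParser()
    config.read("modify.cfg")

    # scalar option restricted to a fixed set of allowed values
    mode = get_config_value(
        config, section="modify_input", item="mode",
        file_path="modify.cfg", allowed_values=["global", "regional"],
    )

    # optional list of floats; UNSET in the file falls back to the default
    levels = get_config_value(
        config, section="modify_input", item="levels",
        file_path="modify.cfg", is_list=True, convert_to_type=float,
        can_be_unset=True, default=None,
    )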
--- python/ctsm/utils.py | 140 +++++++++++++++++++++++++++++-------------- 1 file changed, 95 insertions(+), 45 deletions(-) diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 5213106499..93e7ef8ab1 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -16,10 +16,11 @@ # This string is used in the out-of-the-box ctsm.cfg and modify.cfg files # to denote a value that needs to be filled in -_CONFIG_PLACEHOLDER = 'FILL_THIS_IN' +_CONFIG_PLACEHOLDER = "FILL_THIS_IN" # This string is used in the out-of-the-box ctsm.cfg and modify.cfg files # to denote a value that can be filled in, but doesn't absolutely need to be -_CONFIG_UNSET = 'UNSET' +_CONFIG_UNSET = "UNSET" + def abort(errmsg): """Abort the program with the given error message @@ -49,6 +50,7 @@ def fill_template_file(path_to_template, path_to_final, substitutions): with open(path_to_final, "w") as final_file: final_file.write(final_file_contents) + def str2bool(var): """ Function for converting different forms of @@ -74,18 +76,30 @@ def str2bool(var): else: raise ValueError("Boolean value expected. [true or false] or [y or n]") + def get_git_sha(): """ Returns Git short SHA for the currect directory. """ - return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode() + return ( + subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) + .strip() + .decode() + ) + def get_ctsm_git_sha(): """ Returns Git short SHA for the ctsm directory. """ - return subprocess.check_output(['git', '-C', path_to_ctsm_root(), - 'rev-parse', '--short', 'HEAD']).strip().decode() + return ( + subprocess.check_output( + ["git", "-C", path_to_ctsm_root(), "rev-parse", "--short", "HEAD"] + ) + .strip() + .decode() + ) + def add_tag_to_filename(filename, tag): """ @@ -113,9 +127,7 @@ def add_tag_to_filename(filename, tag): if basename[cend] == "c": cend = cend - 1 if (basename[cend] != ".") and (basename[cend] != "_"): - logging.error( - "Trouble figuring out where to add tag to filename:" + filename - ) + logger.error("Trouble figuring out where to add tag to filename:" + filename) os.abort() today = date.today() today_string = today.strftime("%y%m%d") @@ -143,29 +155,37 @@ def update_metadata(file, title, summary, contact, data_script, description): would be good (sys.argv) here or in data_script. 
""" - #update attributes + # update attributes today = date.today() today_string = today.strftime("%Y-%m-%d") # This is the required metadata for inputdata files - file.attrs['title'] = title - file.attrs['summary'] = summary - file.attrs['creator'] = getuser() - file.attrs['contact'] = contact - file.attrs['creation_date'] = today_string - file.attrs['data_script'] = data_script - file.attrs['description'] = description - - #delete unrelated attributes if they exist - del_attrs = ['source_code', 'SVN_url', 'hostname', 'history' - 'History_Log', 'Logname', 'Host', 'Version', - 'Compiler_Optimized'] + file.attrs["title"] = title + file.attrs["summary"] = summary + file.attrs["creator"] = getuser() + file.attrs["contact"] = contact + file.attrs["creation_date"] = today_string + file.attrs["data_script"] = data_script + file.attrs["description"] = description + + # delete unrelated attributes if they exist + del_attrs = [ + "source_code", + "SVN_url", + "hostname", + "history" "History_Log", + "Logname", + "Host", + "Version", + "Compiler_Optimized", + ] attr_list = file.attrs for attr in del_attrs: if attr in attr_list: del file.attrs[attr] + def lon_range_0_to_360(lon_in): """ Description @@ -174,19 +194,31 @@ def lon_range_0_to_360(lon_in): """ if -180 <= lon_in < 0: lon_out = lon_in + 360 - logger.info('Resetting longitude from %s to %s to keep in the range ' \ - ' 0 to 360', str(lon_in), str(lon_out)) + logger.info( + "Resetting longitude from %s to %s to keep in the range " " 0 to 360", + str(lon_in), + str(lon_out), + ) elif 0 <= lon_in <= 360 or lon_in is None: lon_out = lon_in else: - errmsg = 'lon_in needs to be in the range 0 to 360' + errmsg = "lon_in needs to be in the range 0 to 360" abort(errmsg) return lon_out -def get_config_value(config, section, item, file_path, allowed_values=None, - default=None, is_list=False, convert_to_type=None, - can_be_unset=False): + +def get_config_value( + config, + section, + item, + file_path, + allowed_values=None, + default=None, + is_list=False, + convert_to_type=None, + can_be_unset=False, +): """Get a given item from a given section of the config object Give a helpful error message if we can't find the given section or item Note that the file_path argument is only used for the sake of the error message @@ -199,22 +231,37 @@ def get_config_value(config, section, item, file_path, allowed_values=None, try: val = config.get(section, item) except NoSectionError: - abort("ERROR: Config file {} must contain section '{}'".format(file_path, section)) + abort( + "ERROR: Config file {} must contain section '{}'".format(file_path, section) + ) except NoOptionError: - abort("ERROR: Config file {} must contain item '{}' in section '{}'".format( - file_path, item, section)) + abort( + "ERROR: Config file {} must contain item '{}' in section '{}'".format( + file_path, item, section + ) + ) if val == _CONFIG_PLACEHOLDER: - abort("Error: {} needs to be specified in config file {}".format(item, file_path)) + abort( + "Error: {} needs to be specified in config file {}".format(item, file_path) + ) - val = _handle_config_value(var=val, default=default, item=item, - is_list=is_list, convert_to_type=convert_to_type, - can_be_unset=can_be_unset, allowed_values=allowed_values) + val = _handle_config_value( + var=val, + default=default, + item=item, + is_list=is_list, + convert_to_type=convert_to_type, + can_be_unset=can_be_unset, + allowed_values=allowed_values, + ) return val -def _handle_config_value(var, default, item, is_list, convert_to_type, - can_be_unset, 
allowed_values): + +def _handle_config_value( + var, default, item, is_list, convert_to_type, can_be_unset, allowed_values +): """ Description ----------- @@ -226,7 +273,7 @@ def _handle_config_value(var, default, item, is_list, convert_to_type, if var == _CONFIG_UNSET: if can_be_unset: return default # default may be None - abort('Must set a value for .cfg file variable: {}'.format(item)) + abort("Must set a value for .cfg file variable: {}".format(item)) # convert string to list of strings; if there is just one element, # we will get a list of size one, which we will convert back to a @@ -237,35 +284,38 @@ def _handle_config_value(var, default, item, is_list, convert_to_type, try: var = [_convert_to_bool(v) for v in var] except ValueError: - abort('Non-boolean value found for .cfg file variable: {}'.format(item)) + abort("Non-boolean value found for .cfg file variable: {}".format(item)) elif convert_to_type is not None: try: var = [convert_to_type(v) for v in var] except ValueError: - abort('Wrong type for .cfg file variable: {}'.format(item)) + abort("Wrong type for .cfg file variable: {}".format(item)) if allowed_values is not None: for val in var: if val not in allowed_values: - print('val = ', val, ' in var not in allowed_values') - errmsg = '{} is not an allowed value for {} in .cfg file. ' \ - 'Check allowed_values'.format(val, item) + print("val = ", val, " in var not in allowed_values") + errmsg = ( + "{} is not an allowed value for {} in .cfg file. " + "Check allowed_values".format(val, item) + ) abort(errmsg) if not is_list: if len(var) > 1: - abort('More than 1 element found for .cfg file variable: {}'.format(item)) + abort("More than 1 element found for .cfg file variable: {}".format(item)) var = var[0] return var + def _convert_to_bool(val): """Convert the given value to boolean Conversion is as in config files 'getboolean' """ - if val.lower() in ['1', 'yes', 'true', 'on']: + if val.lower() in ["1", "yes", "true", "on"]: return True - if val.lower() in ['0', 'no', 'false', 'off']: + if val.lower() in ["0", "no", "false", "off"]: return False raise ValueError("{} cannot be converted to boolean".format(val)) From f4131c9f7d46aae868372f695927c55751fc3899 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Mon, 3 Jan 2022 11:20:36 -0700 Subject: [PATCH 080/223] updates. --- python/ctsm/subset_data.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index cd1747fcb0..f84defc9c6 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -345,6 +345,7 @@ def plat_type(plat): Function to define lat type for the parser and raise error if latitude is not between -90 and 90. + Args: plat(str): latitude Raises: @@ -363,6 +364,7 @@ def plon_type(plon): Function to define lon type for the parser and convert negative longitudes and raise error if lon is not between -180 and 360. + Args: plon (str): longitude Raises: @@ -372,9 +374,9 @@ def plon_type(plon): """ plon = float(plon) if -180 <= plon < 0: - logger.info("lon is: %f", plon) + logger.debug("lon is: %f", plon) plon = plon % 360 - logger.info("after modulo lon is: %f", plon) + logger.debug("after modulo lon is: %f", plon) if plon < 0 or plon > 360: raise argparse.ArgumentTypeError("ERROR: Longitude of single point should be between 0 and " "360 or -180 and 180.") From d509fe28d30d25438525c3f8a28b1e343fe0cf99 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Mon, 3 Jan 2022 11:35:51 -0700 Subject: [PATCH 081/223] update git_utils. 
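All three helpers in this file follow the same pattern: run a git command against the CTSM checkout with subprocess and hand back the stripped, decoded output. A minimal sketch of that shared pattern (the _run_git helper below is illustrative, not part of this change):

import subprocess

def _run_git(*args, repo_root="."):
    # Illustrative helper (not in ctsm): run a git command inside the given
    # repository; check_output raises CalledProcessError on a nonzero exit.
    out = subprocess.check_output(["git", "-C", repo_root] + list(args))
    return out.strip().decode()

# e.g. _run_git("rev-parse", "--short", "HEAD") returns a short SHA like "868040c"

Keeping three separate named functions, as this patch does, trades a little repetition for self-documenting call sites.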
--- python/ctsm/git_utils.py | 45 +++++++++++++++++++++++++++++++++++----- 1 file changed, 40 insertions(+), 5 deletions(-) diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py index 2da254f2f3..868040c269 100644 --- a/python/ctsm/git_utils.py +++ b/python/ctsm/git_utils.py @@ -10,22 +10,57 @@ def get_git_short_hash(): """ Returns Git short SHA for the currect directory. + + Args: + + Raises: + + Returns: + sha (str) : git short hash for ctsm repository """ - sha = subprocess.check_output(['git', '-C', path_to_ctsm_root(), - 'rev-parse', '--short', 'HEAD']).strip().decode() + sha = ( + subprocess.check_output( + ["git", "-C", path_to_ctsm_root(), "rev-parse", "--short", "HEAD"] + ) + .strip() + .decode() + ) return sha def get_git_long_hash(): """ Returns Git long SHA for the currect directory. + + Args: + + Raises: + + Returns: + sha (str) : git long hash for ctsm repository """ - sha = subprocess.check_output(["git", '-C', path_to_ctsm_root(), "rev-parse", "HEAD"]).strip().decode() + sha = ( + subprocess.check_output(["git", "-C", path_to_ctsm_root(), "rev-parse", "HEAD"]) + .strip() + .decode() + ) return sha + def get_git_describe(): """ - Returns git describe output + Function for giving the recent tag of the git repo + + Args: + + Raises: + + Returns: + label (str) : ouput of running 'git describe' in shell """ - label = subprocess.check_output(["git", "describe"]).strip().decode() + label = ( + subprocess.check_output(["git", "describe", path_to_ctsm_root()]) + .strip() + .decode() + ) return label From dde94fe89d4f05037b46a3d091d9f535ec91c176 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Mon, 3 Jan 2022 11:46:16 -0700 Subject: [PATCH 082/223] update logging --- python/ctsm/ctsm_logging.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/ctsm/ctsm_logging.py b/python/ctsm/ctsm_logging.py index 79064b6887..ff51c6d8f2 100644 --- a/python/ctsm/ctsm_logging.py +++ b/python/ctsm/ctsm_logging.py @@ -92,7 +92,7 @@ def output_to_file(file_path, message, log_to_logger=False): """ helper function to write to log file. """ - with open(file_path, "a") as fl: - fl.write(message) + with open(file_path, "a") as log_file: + log_file.write(message) if log_to_logger: logger.info(message) From 4e6c6a23aa7f07ea18e7356a1202795d17ea6803 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Mon, 3 Jan 2022 12:15:48 -0700 Subject: [PATCH 083/223] minor edits. 
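The important fix in this patch is easy to miss: in the attribute-deletion lists, a missing comma made Python silently concatenate the adjacent string literals "history" and "History_Log" into one bogus entry, so neither attribute was ever matched and deleted. A quick illustration of the pitfall:

# Implicit concatenation: a missing comma merges adjacent string literals.
del_attrs = ["hostname", "history" "History_Log", "Logname"]
print(del_attrs)       # ['hostname', 'historyHistory_Log', 'Logname']
print(len(del_attrs))  # 3 entries, not 4 -- 'history' never matches

del_attrs = ["hostname", "history", "History_Log", "Logname"]
print(len(del_attrs))  # 4 entries, as intended

The docstrings added to the unit tests and the single-return rewrite of str2bool are tidy-ups in the same pass.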
--- python/ctsm/site_and_regional/base_case.py |  3 ++-
 python/ctsm/test/test_unit_subset_data.py  | 17 +++++++++++++++--
 python/ctsm/utils.py                       | 12 +++++++-----
 3 files changed, 24 insertions(+), 8 deletions(-)

diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py
index 45deb027ae..dab157748c 100644
--- a/python/ctsm/site_and_regional/base_case.py
+++ b/python/ctsm/site_and_regional/base_case.py
@@ -162,7 +162,8 @@ def update_metadata(nc_file):
         "source_code",
         "SVN_url",
         "hostname",
-        "history History_Log",
+        "history",
+        "History_Log",
         "Logname",
         "Host",
         "Version",
diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py
index b972a20986..4276343c4e 100644
--- a/python/ctsm/test/test_unit_subset_data.py
+++ b/python/ctsm/test/test_unit_subset_data.py
@@ -1,4 +1,7 @@
 #!/usr/bin/env python3
+"""
+Unit tests for subset_data
+"""

 import unittest
 import argparse
@@ -12,23 +15,33 @@


 class TestSubsetData(unittest.TestCase):
+    """
+    Tests for subset_data
+    """

     def test_plonType_positive(self):
+        """
+        Test plon_type with 30
+        """
        result = plon_type(30)
        self.assertEqual(result, 30.0)

     def test_plonType_negative(self):
+        """
+        Test plon_type with -30
+        """
        result = plon_type(-30)
        self.assertEqual(result, 330.0)

     def test_plonType_outOfBounds(self):
+        """
+        Test plon_type with 361
+        """
        with self.assertRaisesRegex(
            argparse.ArgumentTypeError, "Latitude.*should be between"
        ):
            _ = plon_type(361)

-"""Unit tests for subset_data
-"""
 if __name__ == "__main__":
     unit_testing.setup_for_tests()
     unittest.main()
diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py
index 93e7ef8ab1..cb77a25f46 100644
--- a/python/ctsm/utils.py
+++ b/python/ctsm/utils.py
@@ -65,17 +65,18 @@ def str2bool(var):
         ValueError: The string should be one of the mentioned values.

     Returns:
-        bool: Boolean value corresponding to the input.
+        var_out (bool): Boolean value corresponding to the input.
     """
     if isinstance(var, bool):
-        return var
+        var_out = var
     if var.lower() in ("yes", "true", "t", "y", "1"):
-        return True
+        var_out = True
     elif var.lower() in ("no", "false", "f", "n", "0"):
-        return False
+        var_out = False
     else:
-        raise ValueError("Boolean value expected.
[true or false] or [y or n]") + return var_out def get_git_sha(): """ @@ -173,7 +174,8 @@ def update_metadata(file, title, summary, contact, data_script, description): "source_code", "SVN_url", "hostname", - "history" "History_Log", + "history", + "History_Log", "Logname", "Host", "Version", From f9710cac2edb2fe6936647750142a11bc95c444b Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 4 Jan 2022 17:12:56 -0700 Subject: [PATCH 084/223] Changes to arguments and script names so that run_neon.py and subset_data tools test can work --- test/tools/TSMscript_tools.sh | 2 +- test/tools/input_tests_master | 4 ++-- test/tools/nl_files/run_neon_OSBS | 2 +- test/tools/nl_files/subset_data_YELL | 3 ++- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/test/tools/TSMscript_tools.sh b/test/tools/TSMscript_tools.sh index dbd75f4959..a191cebe1f 100755 --- a/test/tools/TSMscript_tools.sh +++ b/test/tools/TSMscript_tools.sh @@ -63,7 +63,7 @@ else tcbtools="$rundir" fi -scopts=`cat ${CLM_SCRIPTDIR}/nl_files/$optfile | sed -e "s|CSMDATA|$CSMDATA|g" | sed -e "s|EXEDIR|$tcbtools|" | sed -e "s|CFGDIR|$cfgdir|g"` +scopts=`cat ${CLM_SCRIPTDIR}/nl_files/$optfile | sed -e "s|CSMDATA|$CSMDATA|g" | sed -e "s|EXEDIR|$tcbtools|g" | sed -e "s|CFGDIR|$cfgdir|g"` scopts=`echo $scopts | sed -e "s|CTSM_ROOT|$CTSM_ROOT|g" | sed -e "s|CIME_ROOT|$CIME_ROOT|g"` echo "TSMscript_tools.sh: running ${cfgdir}/$2 with $scopts; output in ${rundir}/test.log" diff --git a/test/tools/input_tests_master b/test/tools/input_tests_master index 51cfa3a2fc..fd0d7efd6e 100644 --- a/test/tools/input_tests_master +++ b/test/tools/input_tests_master @@ -36,8 +36,8 @@ bliT2 TBLscript_tools.sh mksurfdata_map mksurfdata.pl mksrfdt_1x1_numaIA_crp_SSP sm0a1 TSMscript_tools.sh site_and_regional run_neon.py run_neon_OSBS bl0a1 TBLscript_tools.sh site_and_regional run_neon.py run_neon_OSBS -smba1 TSMscript_tools.sh site_and_regional subset_data.py subset_data_YELL -blba1 TBLscript_tools.sh site_and_regional subset_data.py subset_data_YELL +smba1 TSMscript_tools.sh site_and_regional subset_data subset_data_YELL +blba1 TBLscript_tools.sh site_and_regional subset_data subset_data_YELL smaa2 TSMscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL blaa2 TBLscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL diff --git a/test/tools/nl_files/run_neon_OSBS b/test/tools/nl_files/run_neon_OSBS index b45fc4ffe9..a696479c1a 100644 --- a/test/tools/nl_files/run_neon_OSBS +++ b/test/tools/nl_files/run_neon_OSBS @@ -1 +1 @@ -ad --case-root EXEDIR +--run-type ad diff --git a/test/tools/nl_files/subset_data_YELL b/test/tools/nl_files/subset_data_YELL index 8e1dcbcbb2..fc78dd5116 100644 --- a/test/tools/nl_files/subset_data_YELL +++ b/test/tools/nl_files/subset_data_YELL @@ -1 +1,2 @@ -point --lon 249.45804 --lat 44.95597 --site YELL --crop --dompft 1 --outdir EXEDIR/ +--verbose point --lon 250.45804 --lat 44.95597 --site YELL --dompft 1 --crop --create-domain --create-datm --datm-syr 2000 +--datm-eyr 2000 --outdir EXEDIR/YELL_user-mod_and_data --user-mods-dir EXEDIR/YELL_user-mod_and_data From 638526450bd68e9b227fe0ba1718cbffbe250f53 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 4 Jan 2022 17:31:25 -0700 Subject: [PATCH 085/223] Add --verbose to all of the site_and_regional script arguments, also limit the years of datm data run over to a single year --- test/tools/nl_files/modify_data_YELL | 2 +- test/tools/nl_files/run_neon_OSBS | 2 +- test/tools/nl_files/subset_data_YELL | 3 +-- 3 files 
changed, 3 insertions(+), 4 deletions(-) diff --git a/test/tools/nl_files/modify_data_YELL b/test/tools/nl_files/modify_data_YELL index e76322cdeb..9317ef9180 100644 --- a/test/tools/nl_files/modify_data_YELL +++ b/test/tools/nl_files/modify_data_YELL @@ -1 +1 @@ ---neon_site YELL --surf_dir CSMDATA/lnd/clm2/surfdata_map/NEON --out_dir EXEDIR +--verbose --neon_site YELL --surf_dir CSMDATA/lnd/clm2/surfdata_map/NEON --out_dir EXEDIR diff --git a/test/tools/nl_files/run_neon_OSBS b/test/tools/nl_files/run_neon_OSBS index a696479c1a..f26904edaf 100644 --- a/test/tools/nl_files/run_neon_OSBS +++ b/test/tools/nl_files/run_neon_OSBS @@ -1 +1 @@ ---run-type ad +--verbose --run-type ad diff --git a/test/tools/nl_files/subset_data_YELL b/test/tools/nl_files/subset_data_YELL index fc78dd5116..99ffcbaaed 100644 --- a/test/tools/nl_files/subset_data_YELL +++ b/test/tools/nl_files/subset_data_YELL @@ -1,2 +1 @@ ---verbose point --lon 250.45804 --lat 44.95597 --site YELL --dompft 1 --crop --create-domain --create-datm --datm-syr 2000 ---datm-eyr 2000 --outdir EXEDIR/YELL_user-mod_and_data --user-mods-dir EXEDIR/YELL_user-mod_and_data +--verbose point --lon 250.45804 --lat 44.95597 --site YELL --dompft 1 --crop --create-domain --create-datm --datm-syr 2000 --create-surface --datm-eyr 2000 --outdir EXEDIR/YELL_user-mod_and_data --user-mods-dir EXEDIR/YELL_user-mod_and_data From 15f5c280c79b8d41edd6090f16d8708acdf4fe72 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 4 Jan 2022 17:47:36 -0700 Subject: [PATCH 086/223] Check if History_Log attribute exists before removing it as otherwise it will fail --- python/ctsm/site_and_regional/single_point_case.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index b73c5f4485..b134fca95f 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -275,7 +275,9 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): # update attributes self.update_metadata(f_out) f_out.attrs["Created_from"] = fsurf_in - del f_out.attrs["History_Log"] + if ( hasattr(f_out.attrs, "History_Log" ) ): + del f_out.attrs["History_Log"] + # mode 'w' overwrites file wfile = os.path.join(self.output_dir, fsurf_out) f_out.to_netcdf(path=wfile, mode="w", format="NETCDF3_64BIT") From 4afe339fca6fb4f44126ebbcf3ca6662c50eaa1d Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 4 Jan 2022 17:50:40 -0700 Subject: [PATCH 087/223] verbose is not an option for the modify_singlept_site_neon.py script --- test/tools/nl_files/modify_data_YELL | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/tools/nl_files/modify_data_YELL b/test/tools/nl_files/modify_data_YELL index 9317ef9180..e76322cdeb 100644 --- a/test/tools/nl_files/modify_data_YELL +++ b/test/tools/nl_files/modify_data_YELL @@ -1 +1 @@ ---verbose --neon_site YELL --surf_dir CSMDATA/lnd/clm2/surfdata_map/NEON --out_dir EXEDIR +--neon_site YELL --surf_dir CSMDATA/lnd/clm2/surfdata_map/NEON --out_dir EXEDIR From 94777dcd2624b24c5de90bab7ea0c8103a54b334 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 4 Jan 2022 18:00:25 -0700 Subject: [PATCH 088/223] Add setup only option for run neon case, so that it will only setup a case, but not build and run it, so it checks that the general case setup works, and not the run part of it --- test/tools/nl_files/run_neon_OSBS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/test/tools/nl_files/run_neon_OSBS b/test/tools/nl_files/run_neon_OSBS index f26904edaf..c49fb77783 100644 --- a/test/tools/nl_files/run_neon_OSBS +++ b/test/tools/nl_files/run_neon_OSBS @@ -1 +1 @@ ---verbose --run-type ad +--verbose --run-type ad --setup-only From 3e5fedd9db745117f3bd665a211fb1bba0aab265 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 4 Jan 2022 23:27:14 -0700 Subject: [PATCH 089/223] verbose debug as subparser options. --- python/ctsm/subset_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index f84defc9c6..e40ea6e3b4 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -329,6 +329,7 @@ def get_parser(): type=str, default="", ) + add_logging_args(subparser) # -- print help for both subparsers parser.epilog = textwrap.dedent( @@ -576,7 +577,6 @@ def main(): # add logging flags from ctsm_logging setup_logging_pre_config() parser = get_parser() - add_logging_args(parser) args = parser.parse_args() process_logging_args(args) From 416f45f1c4cb5f6d3a3097b0405f864763b805fb Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 4 Jan 2022 23:27:56 -0700 Subject: [PATCH 090/223] bug fix one line. --- python/ctsm/site_and_regional/single_point_case.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index b73c5f4485..695cc56404 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -275,7 +275,6 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): # update attributes self.update_metadata(f_out) f_out.attrs["Created_from"] = fsurf_in - del f_out.attrs["History_Log"] # mode 'w' overwrites file wfile = os.path.join(self.output_dir, fsurf_out) f_out.to_netcdf(path=wfile, mode="w", format="NETCDF3_64BIT") From 2af8e669f1721ccee3459059a33dc5030e90f545 Mon Sep 17 00:00:00 2001 From: Ryan Knox Date: Wed, 5 Jan 2022 14:20:57 -0500 Subject: [PATCH 091/223] Updated history variable test def list according to variable changes in FATES PR 766 --- .../clm/FatesColdDefHydro/user_nl_clm | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefHydro/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefHydro/user_nl_clm index 33c91d796a..f0bdb388eb 100644 --- a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefHydro/user_nl_clm +++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefHydro/user_nl_clm @@ -3,10 +3,10 @@ hist_nhtfrq = -24 hist_empty_htapes = .true. use_fates_planthydro= .true. 
hist_fincl1 = 'FATES_ERRH2O_SZPF', 'FATES_TRAN_SZPF', -'FATES_SAPFLOW_SZPF', 'FATES_ITERH1_SZPF', 'FATES_ROOTH2O_ABS_SZPF', -'FATES_ROOTH2O_TRANS_SZPF', 'FATES_STEMH2O_SZPF', 'FATES_LEAFH2O_SZPF', -'FATES_ROOTH2O_POT_SZPF', 'FATES_BTRAN_SZPF', 'FATES_ROOTWGT_SOILVWC', -'FATES_ROOTWGT_SOILVWCSAT', 'FATES_ROOTWGT_SOILMATPOT', 'FATES_SOILMATPOT_SL', -'FATES_SOILVWC_SL', 'FATES_SOILVWCSAT_SL', 'FATES_ROOTUPTAKE', -'FATES_ROOTUPTAKE_SL', 'FATES_ROOTUPTAKE0_SZPF', 'FATES_ROOTUPTAKE10_SZPF', -'FATES_ROOTUPTAKE50_SZPF', 'FATES_ROOTUPTAKE100_SZPF' +'FATES_SAPFLOW_SZPF', 'FATES_ITERH1_SZPF','FATES_ABSROOT_H2O_SZPF', +'FATES_TRANSROOT_H2O_SZPF','FATES_STEM_H2O_SZPF','FATES_LEAF_H2O_SZPF', +'FATES_ABSROOT_H2OPOT_SZPF','FATES_BTRAN_SZPF','FATES_ROOTWGT_SOILVWC', +'FATES_ROOTWGT_SOILVWCSAT','FATES_ROOTWGT_SOILMATPOT','FATES_SOILMATPOT_SL', +'FATES_SOILVWC_SL','FATES_SOILVWCSAT_SL','FATES_ROOTUPTAKE', +'FATES_ROOTUPTAKE_SL','FATES_ROOTUPTAKE0_SZPF','FATES_ROOTUPTAKE10_SZPF', +'FATES_ROOTUPTAKE50_SZPF','FATES_ROOTUPTAKE100_SZPF' From 96717fbd632b90d598607a347d21188dd4ff45e9 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Wed, 5 Jan 2022 15:56:25 -0700 Subject: [PATCH 092/223] updates to argparse for clarity --- .../site_and_regional/single_point_case.py | 12 +++---- python/ctsm/subset_data.py | 35 ++++++++----------- 2 files changed, 20 insertions(+), 27 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 695cc56404..a733795990 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -93,9 +93,9 @@ def __init__( create_user_mods, overwrite_single_pft, dominant_pft, - zero_nonveg_landunits, + include_nonveg, uniform_snowpack, - saturation_excess, + cap_saturation, output_dir, ): super().__init__(create_domain, create_surfdata, create_landuse, create_datm, @@ -105,9 +105,9 @@ def __init__( self.site_name = site_name self.overwrite_single_pft = overwrite_single_pft self.dominant_pft = dominant_pft - self.zero_nonveg_landunits = zero_nonveg_landunits + self.include_nonveg = include_nonveg self.uniform_snowpack = uniform_snowpack - self.saturation_excess = saturation_excess + self.cap_saturation = cap_saturation self.output_dir = output_dir self.tag = None @@ -238,7 +238,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): f_out["PCT_NAT_PFT"][:, :, :] = 0 if self.dominant_pft < 16: f_out['PCT_NAT_PFT'][:, :, self.dominant_pft] = 100 - if self.zero_nonveg_landunits: + if not self.include_nonveg: f_out["PCT_NATVEG"][:, :] = 100 f_out["PCT_CROP"][:, :] = 0 f_out["PCT_LAKE"][:, :] = 0.0 @@ -247,7 +247,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): f_out["PCT_GLACIER"][:, :] = 0.0 if self.uniform_snowpack: f_out["STD_ELEV"][:, :] = 20.0 - if not self.saturation_excess: + if self.cap_saturation: f_out["FMAX"][:, :] = 0.0 # specify dimension order diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index e40ea6e3b4..f66b2d52a6 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -132,39 +132,32 @@ def get_parser(): default="", ) pt_parser.add_argument( - "--variable-snow-fraction", - help="Create surface data with variable snow fraction. 
If unset, snow is set to uniform " - "fraction.", - action="store_false", + "--uniform-snowpack", + help="Modify surface data to have a uniform snow fraction.", + action="store_true", dest="uni_snow", required=False, - default=True, ) pt_parser.add_argument( - "--allow-multiple-pft", - help="Create surface data with multiple PFTs. If unset, it assumes the whole grid is 100%% " - "single PFT set by --dom-pft.", - action="store_false", + "--overwrite-to-single-pft", + help="Modify surface dataset to be 100%% one single PFT set by --dom-pft.", + action="store_true", dest="overwrite_single_pft", required=False, - default=True, ) pt_parser.add_argument( - "--zero-nonveg", - help="Set all non-vegetation landunits in the surface data to zero.", + "--include-nonveg", + help="Do not zero non-vegetation land units in the surface data.", action="store_true", - dest="zero_nonveg", + dest="include_nonveg", required=False, - default=False, ) pt_parser.add_argument( - "--allow-saturation-excess", - help="Create surface data allowing saturated conditions. If unset saturation excess is set " - "to zero.", + "--cap-saturation", + help="Modify surface data to not allow saturation excess.", action="store_true", - dest="saturation_excess", + dest="cap_saturation", required=False, - default=False, ) pt_parser.add_argument( "--dompft", @@ -484,9 +477,9 @@ def subset_point(args, file_dict: dict): args.create_user_mods, args.overwrite_single_pft, args.dom_pft, - args.zero_nonveg, + args.include_nonveg, args.uni_snow, - args.saturation_excess, + args.cap_saturation, args.out_dir, ) From 5f417ec50b98a158a13258eefca830b367ba0045 Mon Sep 17 00:00:00 2001 From: Adrianna Foster Date: Wed, 5 Jan 2022 16:48:41 -0700 Subject: [PATCH 093/223] more updates to argparse --- python/ctsm/subset_data.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index f66b2d52a6..fb7b539655 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -270,7 +270,6 @@ def get_parser(): action="store_true", dest="create_user_mods", required=False, - default=True, ) subparser.add_argument( "--datm-syr", @@ -298,7 +297,6 @@ def get_parser(): action="store_true", dest="crop_flag", required=False, - default=False, ) if subparser == pt_parser: From d3a0c142c045b9c7b75628cc277d2b9abf276cec Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sat, 8 Jan 2022 00:05:31 -0700 Subject: [PATCH 094/223] putting args_utils. --- python/ctsm/args_utils.py | 57 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 python/ctsm/args_utils.py diff --git a/python/ctsm/args_utils.py b/python/ctsm/args_utils.py new file mode 100644 index 0000000000..598e54e77b --- /dev/null +++ b/python/ctsm/args_utils.py @@ -0,0 +1,57 @@ +""" +General-purpose utilities for handling command-line +arguments and flags in ctsm python codes. +""" + +from ctsm.config_utils import lon_range_0_to_360 + +# Types + +# Types for command-lines error handling: + +def plat_type(plat): + """ + Function to define lat type for the parser + and + raise error if latitude is not between -90 and 90. + + Args: + plat(str): latitude + Raises: + Error when plat (latitude) is not between -90 and 90. + Returns: + plat (float): latitude in float + """ + plat_out = float(plat) + if (plat_out < -90) or (plat_out > 90): + raise argparse.ArgumentTypeError( + "ERROR: Latitude should be between -90 and 90." 
+ ) + return plat_out + +def plon_type(plon): + """ + Function to define lon type for the parser and + convert negative longitudes and + raise error if lon is not between -180 and 360. + + Args: + plon (str): longitude + Raises: + Error (ArgumentTypeError): when longitude is <-180 and >360. + Returns: + plon(float): converted longitude between 0 and 360 + """ + plon_out = float(plon) + if -180 <= plon < 0: + logger.debug("lon is: %f", plon) + plon = plon % 360 + logger.debug("after modulo lon is: %f", plon) + if plon < 0 or plon > 360: + raise argparse.ArgumentTypeError("ERROR: Longitude of single point should be between 0 and " + "360 or -180 and 180.") + return plon + + + + From fc082ef649aadc734154e987a9483f7cc2955d0c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sat, 8 Jan 2022 00:12:40 -0700 Subject: [PATCH 095/223] committing args_utils --- python/ctsm/args_utils.py | 30 +++++++++++++----------------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/python/ctsm/args_utils.py b/python/ctsm/args_utils.py index 598e54e77b..f524895472 100644 --- a/python/ctsm/args_utils.py +++ b/python/ctsm/args_utils.py @@ -3,12 +3,13 @@ arguments and flags in ctsm python codes. """ +from argparse import ArgumentParser from ctsm.config_utils import lon_range_0_to_360 -# Types # Types for command-lines error handling: + def plat_type(plat): """ Function to define lat type for the parser @@ -18,9 +19,9 @@ def plat_type(plat): Args: plat(str): latitude Raises: - Error when plat (latitude) is not between -90 and 90. + Error (ArgumentTypeError): when plat (latitude) is not between -90 and 90. Returns: - plat (float): latitude in float + plat_out (float): latitude in float """ plat_out = float(plat) if (plat_out < -90) or (plat_out > 90): @@ -29,10 +30,11 @@ def plat_type(plat): ) return plat_out + def plon_type(plon): """ Function to define lon type for the parser and - convert negative longitudes and + convert negative longitudes to 0-360 and raise error if lon is not between -180 and 360. Args: @@ -40,18 +42,12 @@ def plon_type(plon): Raises: Error (ArgumentTypeError): when longitude is <-180 and >360. Returns: - plon(float): converted longitude between 0 and 360 + plon_out (float): converted longitude between 0 and 360 """ - plon_out = float(plon) - if -180 <= plon < 0: - logger.debug("lon is: %f", plon) - plon = plon % 360 - logger.debug("after modulo lon is: %f", plon) + plon = float(plon) if plon < 0 or plon > 360: - raise argparse.ArgumentTypeError("ERROR: Longitude of single point should be between 0 and " - "360 or -180 and 180.") - return plon - - - - + raise argparse.ArgumentTypeError( + "ERROR: Longitude should be between 0 and 360 or -180 and 180." + ) + plon_out = lon_range_0_to_360(plon) + return plon_out From 694d1b3d1546d5836a1c4c1ddafa5889938f2521 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sat, 8 Jan 2022 00:18:16 -0700 Subject: [PATCH 096/223] moving plat and plon types to args_utils. --- python/ctsm/args_utils.py | 4 +- python/ctsm/subset_data.py | 84 +++++++++++++++++++------------------- 2 files changed, 45 insertions(+), 43 deletions(-) diff --git a/python/ctsm/args_utils.py b/python/ctsm/args_utils.py index f524895472..98faf068f6 100644 --- a/python/ctsm/args_utils.py +++ b/python/ctsm/args_utils.py @@ -3,7 +3,9 @@ arguments and flags in ctsm python codes. 
""" -from argparse import ArgumentParser +import logging +import argparse + from ctsm.config_utils import lon_range_0_to_360 diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index fb7b539655..b958e0eb7e 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -67,6 +67,7 @@ from ctsm.site_and_regional.base_case import DatmFiles from ctsm.site_and_regional.single_point_case import SinglePointCase from ctsm.site_and_regional.regional_case import RegionalCase +from ctsm.args_utils import plon_type, plat_type from ctsm.path_utils import path_to_ctsm_root # -- import ctsm logging flags @@ -331,48 +332,47 @@ def get_parser(): ) return parser - -def plat_type(plat): - """ - Function to define lat type for the parser - and - raise error if latitude is not between -90 and 90. - - Args: - plat(str): latitude - Raises: - Error (ArgumentTypeError): when plat (latitude) is not between -90 and 90. - Returns: - plat (float): latitude in float - """ - plat = float(plat) - if (plat < -90) or (plat > 90): - raise argparse.ArgumentTypeError("ERROR: Latitude should be between -90 and 90.") - return plat - - -def plon_type(plon): - """ - Function to define lon type for the parser and - convert negative longitudes and - raise error if lon is not between -180 and 360. - - Args: - plon (str): longitude - Raises: - Error (ArgumentTypeError): when longitude is <-180 and >360. - Returns: - plon(float): converted longitude between 0 and 360 - """ - plon = float(plon) - if -180 <= plon < 0: - logger.debug("lon is: %f", plon) - plon = plon % 360 - logger.debug("after modulo lon is: %f", plon) - if plon < 0 or plon > 360: - raise argparse.ArgumentTypeError("ERROR: Longitude of single point should be between 0 and " - "360 or -180 and 180.") - return plon +#def plat_type(plat): +# """ +# Function to define lat type for the parser +# and +# raise error if latitude is not between -90 and 90. +# +# Args: +# plat(str): latitude +# Raises: +# Error (ArgumentTypeError): when plat (latitude) is not between -90 and 90. +# Returns: +# plat (float): latitude in float +# """ +# plat = float(plat) +# if (plat < -90) or (plat > 90): +# raise argparse.ArgumentTypeError("ERROR: Latitude should be between -90 and 90.") +# return plat + + +#def plon_type(plon): +# """ +# Function to define lon type for the parser and +# convert negative longitudes and +# raise error if lon is not between -180 and 360. +# +# Args: +# plon (str): longitude +# Raises: +# Error (ArgumentTypeError): when longitude is <-180 and >360. 
+#     Returns:
+#         plon(float): converted longitude between 0 and 360
+#     """
+#     plon = float(plon)
+#     if -180 <= plon < 0:
+#         logger.debug("lon is: %f", plon)
+#         plon = plon % 360
+#         logger.debug("after modulo lon is: %f", plon)
+#     if plon < 0 or plon > 360:
+#         raise argparse.ArgumentTypeError("ERROR: Longitude of single point should be between 0 and "
+#                                          "360 or -180 and 180.")
+#     return plon


 def setup_user_mods(user_mods_dir, cesmroot):

From 85cc86170a6f51ef1c4ae9820a0789cb42721e82 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Sat, 8 Jan 2022 00:21:36 -0700
Subject: [PATCH 097/223] removing plat plon types from subset_data.py

--- python/ctsm/config_utils.py | 142 ++++++++++++++++++++++++++++++++++++
 python/ctsm/subset_data.py  |  43 -----------
 2 files changed, 142 insertions(+), 43 deletions(-)
 create mode 100644 python/ctsm/config_utils.py

diff --git a/python/ctsm/config_utils.py b/python/ctsm/config_utils.py
new file mode 100644
index 0000000000..95a5548ec8
--- /dev/null
+++ b/python/ctsm/config_utils.py
@@ -0,0 +1,142 @@
+"""
+General-purpose utilities and functions for handling command-line
+config files in ctsm python codes.
+"""
+
+import logging
+from configparser import NoOptionError, NoSectionError
+
+from ctsm.utils import abort
+
+logger = logging.getLogger(__name__)
+
+# This string is used in the out-of-the-box ctsm.cfg and modify.cfg files
+# to denote a value that needs to be filled in
+_CONFIG_PLACEHOLDER = "FILL_THIS_IN"
+# This string is used in the out-of-the-box ctsm.cfg and modify.cfg files
+# to denote a value that can be filled in, but doesn't absolutely need to be
+_CONFIG_UNSET = "UNSET"
+
+def lon_range_0_to_360(lon_in):
+    """
+    Description
+    -----------
+    Restrict longitude to 0 to 360 when given as -180 to 180.
+    """
+    if -180 <= lon_in < 0:
+        lon_out = lon_in % 360
+        logger.info(
+            "Resetting longitude from %s to %s to keep in the range " " 0 to 360",
+            str(lon_in),
+            str(lon_out),
+        )
+    elif 0 <= lon_in <= 360 or lon_in is None:
+        lon_out = lon_in
+    else:
+        errmsg = "lon_in needs to be in the range 0 to 360"
+        abort(errmsg)
+
+    return lon_out
+
+
+def get_config_value(
+    config,
+    section,
+    item,
+    file_path,
+    allowed_values=None,
+    default=None,
+    is_list=False,
+    convert_to_type=None,
+    can_be_unset=False,
+):
+    """Get a given item from a given section of the config object
+    Give a helpful error message if we can't find the given section or item
+    Note that the file_path argument is only used for the sake of the error message
+    If allowed_values is present, it should be a list of strings giving allowed values
+    The function _handle_config_value determines what to do if we read:
+    - a list or
+    - a str that needs to be converted to int / float / bool
+    - _CONFIG_UNSET: anything with the value "UNSET" will become "None"
+    """
+    try:
+        val = config.get(section, item)
+    except NoSectionError:
+        abort(
+            "ERROR: Config file {} must contain section '{}'".format(file_path, section)
+        )
+    except NoOptionError:
+        abort(
+            "ERROR: Config file {} must contain item '{}' in section '{}'".format(
+                file_path, item, section
+            )
+        )
+
+    if val == _CONFIG_PLACEHOLDER:
+        abort(
+            "Error: {} needs to be specified in config file {}".format(item, file_path)
+        )
+
+    val = _handle_config_value(
+        var=val,
+        default=default,
+        item=item,
+        is_list=is_list,
+        convert_to_type=convert_to_type,
+        can_be_unset=can_be_unset,
+        allowed_values=allowed_values,
+    )
+    return val
+
+
+def _handle_config_value(
+    var, default, item, is_list, convert_to_type, can_be_unset, allowed_values
+):
+    """
+    Description
+    -----------
+    Assign the default value or the user-specified one to var.
+    Convert from default type (str) to requested type (int or float).
+ + If is_list is True, then default should be a list + """ + if var == _CONFIG_UNSET: + if can_be_unset: + return default # default may be None + abort("Must set a value for .cfg file variable: {}".format(item)) + + # convert string to list of strings; if there is just one element, + # we will get a list of size one, which we will convert back to a + # scalar later if needed + var = var.split() + + if convert_to_type is bool: + try: + var = [_convert_to_bool(v) for v in var] + except ValueError: + abort("Non-boolean value found for .cfg file variable: {}".format(item)) + elif convert_to_type is not None: + try: + var = [convert_to_type(v) for v in var] + except ValueError: + abort("Wrong type for .cfg file variable: {}".format(item)) + + if allowed_values is not None: + for val in var: + if val not in allowed_values: + print("val = ", val, " in var not in allowed_values") + errmsg = ( + "{} is not an allowed value for {} in .cfg file. " + "Check allowed_values".format(val, item) + ) + abort(errmsg) + + if not is_list: + if len(var) > 1: + abort("More than 1 element found for .cfg file variable: {}".format(item)) + var = var[0] + + return var + + +def _convert_to_bool(val): + """Convert the given value to boolean + + Conversion is as in config files 'getboolean' + """ + if val.lower() in ["1", "yes", "true", "on", "f"]: + return True + if val.lower() in ["0", "no", "false", "off", "f"]: + return False + raise ValueError("{} cannot be converted to boolean".format(val)) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index b958e0eb7e..e18a5f2565 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -332,49 +332,6 @@ def get_parser(): ) return parser -#def plat_type(plat): -# """ -# Function to define lat type for the parser -# and -# raise error if latitude is not between -90 and 90. -# -# Args: -# plat(str): latitude -# Raises: -# Error (ArgumentTypeError): when plat (latitude) is not between -90 and 90. -# Returns: -# plat (float): latitude in float -# """ -# plat = float(plat) -# if (plat < -90) or (plat > 90): -# raise argparse.ArgumentTypeError("ERROR: Latitude should be between -90 and 90.") -# return plat - - -#def plon_type(plon): -# """ -# Function to define lon type for the parser and -# convert negative longitudes and -# raise error if lon is not between -180 and 360. -# -# Args: -# plon (str): longitude -# Raises: -# Error (ArgumentTypeError): when longitude is <-180 and >360. 
-# Returns: -# plon(float): converted longitude between 0 and 360 -# """ -# plon = float(plon) -# if -180 <= plon < 0: -# logger.debug("lon is: %f", plon) -# plon = plon % 360 -# logger.debug("after modulo lon is: %f", plon) -# if plon < 0 or plon > 360: -# raise argparse.ArgumentTypeError("ERROR: Longitude of single point should be between 0 and " -# "360 or -180 and 180.") -# return plon - - def setup_user_mods(user_mods_dir, cesmroot): """ Sets up the user mods files and directories From 432972647518af9f6c49f41654529fd8e50e5c56 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sat, 8 Jan 2022 00:31:15 -0700 Subject: [PATCH 098/223] updating get_git functions --- python/ctsm/git_utils.py | 8 ++++---- python/ctsm/utils.py | 24 ------------------------ 2 files changed, 4 insertions(+), 28 deletions(-) diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py index 868040c269..74f5a8af4b 100644 --- a/python/ctsm/git_utils.py +++ b/python/ctsm/git_utils.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -def get_git_short_hash(): +def get_ctsm_git_short_hash(): """ Returns Git short SHA for the currect directory. @@ -28,7 +28,7 @@ def get_git_short_hash(): return sha -def get_git_long_hash(): +def get_ctsm_git_long_hash(): """ Returns Git long SHA for the currect directory. @@ -47,7 +47,7 @@ def get_git_long_hash(): return sha -def get_git_describe(): +def get_ctsm_git_describe(): """ Function for giving the recent tag of the git repo @@ -59,7 +59,7 @@ def get_git_describe(): label (str) : ouput of running 'git describe' in shell """ label = ( - subprocess.check_output(["git", "describe", path_to_ctsm_root()]) + subprocess.check_output(["git", "-C", path_to_ctsm_root(), "describe"]) .strip() .decode() ) diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index cb77a25f46..ca4a692fbe 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -78,30 +78,6 @@ def str2bool(var): return var_out -def get_git_sha(): - """ - Returns Git short SHA for the currect directory. - """ - return ( - subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) - .strip() - .decode() - ) - - -def get_ctsm_git_sha(): - """ - Returns Git short SHA for the ctsm directory. - """ - return ( - subprocess.check_output( - ["git", "-C", path_to_ctsm_root(), "rev-parse", "--short", "HEAD"] - ) - .strip() - .decode() - ) - - def add_tag_to_filename(filename, tag): """ Add a tag and replace timetag of a filename From 86d6e793bbe55a6749d0a9352fc894a43d05ba50 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sat, 8 Jan 2022 00:33:18 -0700 Subject: [PATCH 099/223] modifying surface dataset update get_git. 
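This commit points modify_fsurdat at the helpers renamed in the previous change. One detail of that rename is worth spelling out: git describe treats a bare argument as a commit-ish, not a directory, so the old call that passed path_to_ctsm_root() as an argument would fail; the corrected form uses -C to run the command from inside the repository. A short sketch of the difference (the checkout path is illustrative):

import subprocess

repo = "/path/to/CTSM"  # illustrative path to a CTSM checkout

# Broken: the path is parsed as a revision name and git fails with
# "fatal: Not a valid object name".
# subprocess.check_output(["git", "describe", repo])

# Fixed: -C switches into the repository before running the subcommand.
label = subprocess.check_output(["git", "-C", repo, "describe"]).strip().decode()
print(label)  # e.g. a tag-derived label such as "ctsm5.1.dev064-5-g4329726"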
--- python/ctsm/modify_fsurdat/modify_fsurdat.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/python/ctsm/modify_fsurdat/modify_fsurdat.py b/python/ctsm/modify_fsurdat/modify_fsurdat.py index bf1a5e8c9b..245ff887ae 100644 --- a/python/ctsm/modify_fsurdat/modify_fsurdat.py +++ b/python/ctsm/modify_fsurdat/modify_fsurdat.py @@ -11,7 +11,8 @@ import numpy as np import xarray as xr -from ctsm.utils import abort, get_ctsm_git_sha, update_metadata, lon_range_0_to_360 +from ctsm.git_utils import get_ctsm_git_short_hash +from ctsm.utils import abort, update_metadata, lon_range_0_to_360 logger = logging.getLogger(__name__) @@ -108,7 +109,7 @@ def write_output(self, fsurdat_in, fsurdat_out): title = 'Modified fsurdat file' summary = 'Modified fsurdat file' contact = 'N/A' - data_script = os.path.abspath(__file__) + " -- " + get_ctsm_git_sha() + data_script = os.path.abspath(__file__) + " -- " + get_ctsm_git_short_hash() description = 'Modified this file: ' + fsurdat_in update_metadata(self.file, title=title, summary=summary, contact=contact, data_script=data_script, From 00120d74ac0066fbdfe7ec55b51e94c825e11318 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sat, 8 Jan 2022 00:47:44 -0700 Subject: [PATCH 100/223] addressing minor comments. --- python/ctsm/site_and_regional/base_case.py | 6 +++--- python/ctsm/site_and_regional/regional_case.py | 2 +- python/ctsm/site_and_regional/single_point_case.py | 2 +- python/ctsm/subset_data.py | 2 -- 4 files changed, 5 insertions(+), 7 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index dab157748c..272af59a67 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -18,7 +18,7 @@ import xarray as xr # -- import local classes for this script -from ctsm.git_utils import get_git_short_hash +from ctsm.git_utils import get_ctsm_git_short_hash USRDAT_DIR = "CLM_USRDAT_DIR" logger = logging.getLogger(__name__) @@ -151,11 +151,11 @@ def update_metadata(nc_file): today_string = today.strftime("%Y-%m-%d") # get git hash - sha = get_git_short_hash() + sha = get_ctsm_git_short_hash() nc_file.attrs["Created_on"] = today_string nc_file.attrs["Created_by"] = getuser() - nc_file.attrs["Created_with"] = os.path.abspath(__file__) + " -- " + sha + nc_file.attrs["Created_with"] = './subset_data' + " -- " + sha # delete unrelated attributes if they exist del_attrs = [ diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 712116e90a..107c2ab4df 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -87,7 +87,7 @@ def __init__( self.lon2 = lon2 self.reg_name = reg_name self.output_dir = output_dir - self.tag = None + self.create_tag() def create_tag(self): """ diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index a733795990..fa63081a60 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -109,7 +109,7 @@ def __init__( self.uniform_snowpack = uniform_snowpack self.cap_saturation = cap_saturation self.output_dir = output_dir - self.tag = None + self.create_tag() def create_tag(self): """ diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index e18a5f2565..76fb6e515c 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -438,7 +438,6 @@ def 
subset_point(args, file_dict: dict): args.out_dir, ) - single_point.create_tag() logger.debug(single_point) # -- Create CTSM domain file @@ -495,7 +494,6 @@ def subset_region(args, file_dict: dict): args.out_dir, ) - region.create_tag() logger.debug(region) # -- Create CTSM domain file From 7991267facfb9a20e021b0b1ab2569fd2ba63961 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sat, 8 Jan 2022 01:02:47 -0700 Subject: [PATCH 101/223] removing the lsmlat lsmlon update lines that were duplicates. --- python/ctsm/site_and_regional/single_point_case.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index fa63081a60..951883335f 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -227,11 +227,6 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): # expand dimensions f_out = f_out.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) - # update the plon and plat to match the surface data - # we do this so that if we create user_mods the PTS_LON and PTS_LAT in CIME match - # the surface data coordinates - which is required - self.plat = f_out.coords["lsmlat"].values[0] - self.plon = f_out.coords["lsmlon"].values[0] # modify surface data properties if self.overwrite_single_pft: @@ -267,6 +262,8 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): ) # update lsmlat and lsmlon to match site specific instead of the nearest point + # we do this so that if we create user_mods the PTS_LON and PTS_LAT in CIME match + # the surface data coordinates - which is required f_out['lsmlon'] = np.atleast_1d(self.plon) f_out['lsmlat'] = np.atleast_1d(self.plat) f_out['LATIXY'][:, :] = self.plat From 3c0827f4553840a364b00b0ef7e1ad86993d5a10 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sat, 8 Jan 2022 01:35:17 -0700 Subject: [PATCH 102/223] updating config_utils.py --- python/ctsm/config_utils.py | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/python/ctsm/config_utils.py b/python/ctsm/config_utils.py index 95a5548ec8..7729f2b060 100644 --- a/python/ctsm/config_utils.py +++ b/python/ctsm/config_utils.py @@ -129,14 +129,29 @@ def _handle_config_value( return var +def _convert_to_bool(var): + """ + Function for converting different forms of + boolean strings to boolean value. + + Args: + var (str): String bool input -def _convert_to_bool(val): - """Convert the given value to boolean + Raises: + if the argument is not an acceptable boolean string + (such as yes or no ; true or false ; y or n ; t or f ; 0 or 1). + ValueError: The string should be one of the mentioned values. - Conversion is as in config files 'getboolean' + Returns: + var_out (bool): Boolean value corresponding to the input. """ - if val.lower() in ["1", "yes", "true", "on", "f"]: - return True - if val.lower() in ["0", "no", "false", "off", "f"]: - return False - raise ValueError("{} cannot be converted to boolean".format(val)) + if var.lower() in ("yes", "true", "t", "y", "1"): + var_out = True + elif var.lower() in ("no", "false", "f", "n", "0"): + var_out = False + else: + raise ValueError("Boolean value expected. [true or false] or [y or n]") + + return var_out + + From bccabbd841a044ce2411958ca3a458a4269a1cb8 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sat, 8 Jan 2022 01:48:53 -0700 Subject: [PATCH 103/223] removing defaults. 
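These deletions lean on documented argparse behavior: an option declared with action="store_true" already defaults to False when the flag is absent (and store_false defaults to True), so the explicit default=False arguments were redundant. A minimal check:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--create-domain", action="store_true", dest="create_domain")
args = parser.parse_args([])   # simulate an empty command line
print(args.create_domain)      # False: store_true implies default=False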
--- python/ctsm/subset_data.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 76fb6e515c..71c66e44ea 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 """ |------------------------------------------------------------------| |--------------------- Instructions -----------------------------| @@ -77,9 +76,6 @@ process_logging_args, ) -_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", 'python')) -sys.path.insert(1, _CTSM_PYTHON) - DEFAULTS_FILE = "default_data.cfg" logger = logging.getLogger(__name__) @@ -141,7 +137,7 @@ def get_parser(): ) pt_parser.add_argument( "--overwrite-to-single-pft", - help="Modify surface dataset to be 100%% one single PFT set by --dom-pft.", + help="Modify surface dataset to be 100%% one single PFT set by --dompft.", action="store_true", dest="overwrite_single_pft", required=False, @@ -239,7 +235,6 @@ def get_parser(): action="store_true", dest="create_domain", required=False, - default=False, ) subparser.add_argument( "--create-surface", @@ -247,7 +242,6 @@ def get_parser(): action="store_true", dest="create_surfdata", required=False, - default=False, ) subparser.add_argument( "--create-landuse", @@ -255,7 +249,6 @@ def get_parser(): action="store_true", dest="create_landuse", required=False, - default=False, ) subparser.add_argument( "--create-datm", @@ -263,7 +256,6 @@ def get_parser(): action="store_true", dest="create_datm", required=False, - default=False, ) subparser.add_argument( "--create-user-mods", From f362eb036b58f05fc9a0fa1bc153a63d5f58353c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sat, 8 Jan 2022 01:53:48 -0700 Subject: [PATCH 104/223] abort! --- python/ctsm/utils.py | 32 ++------------------------------ 1 file changed, 2 insertions(+), 30 deletions(-) diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index ca4a692fbe..24ecb8db2b 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -50,34 +50,6 @@ def fill_template_file(path_to_template, path_to_final, substitutions): with open(path_to_final, "w") as final_file: final_file.write(final_file_contents) - -def str2bool(var): - """ - Function for converting different forms of - command line boolean strings to boolean value. - - Args: - var (str): String bool input - - Raises: - if the argument is not an acceptable boolean string - (such as yes or no ; true or false ; y or n ; t or f ; 0 or 1). - ValueError: The string should be one of the mentioned values. - - Returns: - var_out (bool): Boolean value corresponding to the input. - """ - if isinstance(var, bool): - var_out = var - if var.lower() in ("yes", "true", "t", "y", "1"): - var_out = True - elif var.lower() in ("no", "false", "f", "n", "0"): - var_out = False - else: - raise ValueError("Boolean value expected. 
[true or false] or [y or n]")
-
-    return var_out
-
 def add_tag_to_filename(filename, tag):
     """
     Add a tag and replace timetag of a filename
@@ -104,8 +76,8 @@ def add_tag_to_filename(filename, tag):
     if basename[cend] == "c":
         cend = cend - 1
     if (basename[cend] != ".") and (basename[cend] != "_"):
-        logger.error("Trouble figuring out where to add tag to filename:" + filename)
-        os.abort()
+        logger.error("Trouble figuring out where to add tag to filename: %s", filename)
+        abort("Trouble figuring out where to add tag to filename: " + filename)
     today = date.today()
     today_string = today.strftime("%y%m%d")
     fname_out = basename[:cend] + "_" + tag + "_c" + today_string + ".nc"

From d000f538a6f7fe1699409f24fab360346d489638 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Sat, 8 Jan 2022 02:42:06 -0700
Subject: [PATCH 105/223] updating the use of get_config_value

--- python/ctsm/lilac_make_runtime_inputs.py       | 3 ++-
 python/ctsm/modify_fsurdat/fsurdat_modifier.py | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/python/ctsm/lilac_make_runtime_inputs.py b/python/ctsm/lilac_make_runtime_inputs.py
index 567b79d1a1..1d156077d9 100644
--- a/python/ctsm/lilac_make_runtime_inputs.py
+++ b/python/ctsm/lilac_make_runtime_inputs.py
@@ -11,7 +11,8 @@
 from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args
 from ctsm.path_utils import path_to_ctsm_root
-from ctsm.utils import abort, get_config_value
+from ctsm.utils import abort
+from ctsm.config_utils import get_config_value

 logger = logging.getLogger(__name__)

diff --git a/python/ctsm/modify_fsurdat/fsurdat_modifier.py b/python/ctsm/modify_fsurdat/fsurdat_modifier.py
index 76b2374a05..d4f62a1fee 100644
--- a/python/ctsm/modify_fsurdat/fsurdat_modifier.py
+++ b/python/ctsm/modify_fsurdat/fsurdat_modifier.py
@@ -8,7 +8,7 @@
 import logging
 import argparse
 from configparser import ConfigParser
-from ctsm.utils import get_config_value
+from ctsm.config_utils import get_config_value
 from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args
 from ctsm.modify_fsurdat.modify_fsurdat import ModifyFsurdat

From 07d677d42e291dd863e30ca2f2302a396a80996f Mon Sep 17 00:00:00 2001
From: Erik Kluzek
Date: Sun, 9 Jan 2022 20:50:02 -0700
Subject: [PATCH 106/223] Remove the getregional_datasets scripts and the
 testing for it as well as the documentation about them

--- test/tools/nl_files/getregional               |   1 -
 test/tools/nl_files/getregional_05popd        |   1 -
 test/tools/nl_files/getregional_T62           |   1 -
 test/tools/nl_files/getregional_ndep          |   1 -
 tools/site_and_regional/README.getregional    |  35 --
 .../getregional_datasets.ncl                  | 268 -------------
 .../site_and_regional/getregional_datasets.pl | 375 ------------------
 tools/site_and_regional/sample_inlist         |  18 -
 tools/site_and_regional/sample_inlist_0.5popd |  22 -
 tools/site_and_regional/sample_inlist_T62     |  23 --
 tools/site_and_regional/sample_inlist_ndep    |  22 -
 tools/site_and_regional/sample_outlist        |  14 -
 .../site_and_regional/sample_outlist_0.5popd  |  14 -
 tools/site_and_regional/sample_outlist_T62    |  16 -
 tools/site_and_regional/sample_outlist_ndep   |  15 -
 15 files changed, 826 deletions(-)
 delete mode 100644 test/tools/nl_files/getregional
 delete mode 100644 test/tools/nl_files/getregional_05popd
 delete mode 100644 test/tools/nl_files/getregional_T62
 delete mode 100644 test/tools/nl_files/getregional_ndep
 delete mode 100644 tools/site_and_regional/README.getregional
 delete mode 100644 tools/site_and_regional/getregional_datasets.ncl
 delete mode 100755 tools/site_and_regional/getregional_datasets.pl
 delete mode 100644
tools/site_and_regional/sample_inlist delete mode 100644 tools/site_and_regional/sample_inlist_0.5popd delete mode 100644 tools/site_and_regional/sample_inlist_T62 delete mode 100644 tools/site_and_regional/sample_inlist_ndep delete mode 100644 tools/site_and_regional/sample_outlist delete mode 100644 tools/site_and_regional/sample_outlist_0.5popd delete mode 100644 tools/site_and_regional/sample_outlist_T62 delete mode 100644 tools/site_and_regional/sample_outlist_ndep diff --git a/test/tools/nl_files/getregional b/test/tools/nl_files/getregional deleted file mode 100644 index 5e5d348e39..0000000000 --- a/test/tools/nl_files/getregional +++ /dev/null @@ -1 +0,0 @@ --SW 52,190 -NE 73,220 -i sample_inlist -o sample_outlist diff --git a/test/tools/nl_files/getregional_05popd b/test/tools/nl_files/getregional_05popd deleted file mode 100644 index 79747ad9cd..0000000000 --- a/test/tools/nl_files/getregional_05popd +++ /dev/null @@ -1 +0,0 @@ --SW 52,190 -NE 73,220 -i sample_inlist_0.5popd -o sample_outlist_0.5popd diff --git a/test/tools/nl_files/getregional_T62 b/test/tools/nl_files/getregional_T62 deleted file mode 100644 index 8288847cf5..0000000000 --- a/test/tools/nl_files/getregional_T62 +++ /dev/null @@ -1 +0,0 @@ --SW 52,190 -NE 73,220 -i sample_inlist_T62 -o sample_outlist_T62 diff --git a/test/tools/nl_files/getregional_ndep b/test/tools/nl_files/getregional_ndep deleted file mode 100644 index 125285f690..0000000000 --- a/test/tools/nl_files/getregional_ndep +++ /dev/null @@ -1 +0,0 @@ --SW 52,190 -NE 73,220 -i sample_inlist_ndep -o sample_outlist_ndep diff --git a/tools/site_and_regional/README.getregional b/tools/site_and_regional/README.getregional deleted file mode 100644 index 56c1d0834d..0000000000 --- a/tools/site_and_regional/README.getregional +++ /dev/null @@ -1,35 +0,0 @@ -$CTSMROOT/tools/site_and_regional/README.getregional Erik Kluzek - 06/08/2018 - -Information on the getregional_datasets script. - -The getregional_datasets.pl script operates on global datasets and -extracts out a regional box (or single point) within it. - - -QUICKSTART: - - -Here is how you would use the script to run a setup a simple case. - -1.) Create list of input global files you want to extract from. - -A sample file is: sample_inlist - -2.) Create list of regional files that will be created. - -A sample file is: sample_outlist - -3.) Run getregional - -set DIR=`pwd` -./getregional_datasets.pl -ne 74,221 -sw 51,189 -i sample_inlist -o sample_outlist - -4.) Make sure the user_nl_clm and xmlchange_cmnds files are correct. - -getregional will create a user_nl_clm file and a xmlchange_cmnds script to set -needed env_run settings. - -3.) Create your case using the user_mods_dir option and CLM_USRDAT resolution - -./create_newcase --res CLM_USRDAT --user_mods_dir $DIR --case testAlaska --compset I2000Clm50SpGs diff --git a/tools/site_and_regional/getregional_datasets.ncl b/tools/site_and_regional/getregional_datasets.ncl deleted file mode 100644 index a6da88c67a..0000000000 --- a/tools/site_and_regional/getregional_datasets.ncl +++ /dev/null @@ -1,268 +0,0 @@ -; -; Extract out regional datasets needed to run clm from the global datasets. -; NOTE: Requires at least NCL version 5.1.0 or later... 
-; -; Erik Kluzek -; Aug/28/2009 -; -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"; - -procedure getfilecoord_namenlen( filenames[*]:string, dimnames[*]:string, dimlens[*]:integer, nlen:integer, name:string ) -; -; get the name and size of either the latitude or longitude -; - local d, l -begin - if ( name .eq. "" )then - do d = 0, dimsizes(filenames)-1 - if ( any(dimnames .eq. filenames(d) ) )then - name = filenames(d) - ; Get length of this dimension - do l = 0, dimsizes(dimnames)-1 - if ( dimnames(l) .eq. name )then - nlen = dimlens(l) - end if - end do - end if - end do - end if -end - -begin - ; =========================================================================================================== - ; - ; IMPORTANT NOTE: EDIT THE FOLLOWING TO CUSTOMIZE or use ENV VARIABLE SETTINGS - ; Edit the following as needed to interpolate to a new resolution. - ; - ; Input resolution and position - ; - latS = stringtodouble( getenv("S_LAT") ); ; Get south latitude from env variable - latN = stringtodouble( getenv("N_LAT") ); ; Get north latitude from env variable - lonE = stringtodouble( getenv("E_LON") ); ; Get east longitude from env variable - lonW = stringtodouble( getenv("W_LON") ); ; Get west longitude from env variable - debug_str = getenv("DEBUG"); ; Don't run just -- debug - print_str = getenv("PRINT"); ; Do Extra printing for debugging - gridfile = getenv("GRIDFILE"); ; Input global grid file - nfiles = stringtointeger( getenv("NFILES") ); ; number of files to read in file lists - filelistfil = getenv("INFILELIST"); ; filename of list of global files to work on - regfilelistfil = getenv("OUTFILELIST"); ; filename of list of regional eiles to create - - if ( ismissing(nfiles) )then - print( "NFILES is missing -- need to provide the number of files to process" ); - status_exit( -1 ) - end if - if ( ismissing(filelistfil) .or. ismissing(regfilelistfil) )then - print( "INFILELIST or OUTFILELIST is missing -- need to provide both" ); - status_exit( -1 ) - end if - if ( ismissing(latS) )then - latS = 52.0d00; - end if - if ( ismissing(latN) )then - latN = 73.0d00; - end if - if ( ismissing(lonW) )then - lonW = 190.0d00; - end if - if ( ismissing(lonE) )then - lonE = 220.0d00; - end if - if ( ismissing(print_str) )then - printn = False; - else - if ( print_str .eq. "TRUE" )then - printn = True; - else - printn = False; - end if - end if - if ( ismissing(debug_str) )then - debug = False; - else - if ( debug_str .eq. "TRUE" )then - print( "DEBUG is TRUE do extra printing AND do NOT execute -- just print what WOULD happen" ); - debug = True; - printn = True; - else - debug = False; - end if - end if - print( "Extract out regional datasets from global datasets" ); - if ( printn .eq. True )then - print( "Regional: Latitude="+latS+"-"+latN+" Longitude="+lonW+"-"+lonE ); - end if - - ; - ; Setup the namelist query script - ; - ldate = systemfunc( "date" ); - clmroot = getenv("CLM_ROOT"); - - ; - ; list of latitude and longitude names - ; - filelatnames = (/ "lsmlat", "lat", "nj" /); - filelonnames = (/ "lsmlon", "lon", "ni" /); - - ; - ; Open file - ; - if ( systemfunc("test -f "+gridfile+"; echo $?" ) .ne. 0 )then - print( "Input gridfile does not exist or not found: "+gridfile ); - status_exit( -1 ) - end if - if ( printn .eq. 
True )then - print( "gridfile:"+gridfile ); - end if - ncg = addfile( gridfile, "r" ); - ; - ; Get the names for latitude/longitude on the grid file - ; - varnames = getfilevarnames( ncg ); - gridlonnm = "" - gridlatnm = "" - glat = 0 - glon = 0 - varlens = new( dimsizes(varnames), "integer" ); - getfilecoord_namenlen( (/ "yc", "LATIXY"/), varnames, varlens, glat, gridlatnm ); - getfilecoord_namenlen( (/ "xc", "LONGXY"/), varnames, varlens, glon, gridlonnm ); - delete( varnames ); - delete( varlens ); - if ( gridlatnm .eq. "" )then - print( "Could not find a recognizable latitude dimension name" ) - status_exit(-1); - end if - if ( printn .eq. True )then - print( "gridlatname = "+gridlatnm ) - print( "gridlonname = "+gridlonnm ) - end if - - gridlon = ncg->$gridlonnm$; - gridlon = where( gridlon < 0.0, 360.0 + gridlon, gridlon ); - - indx = region_ind ( (/ncg->$gridlatnm$/), (/gridlon/), latS, latN, lonW, lonE ); - ; Indexes into indices - ilat0 = 0; - ilatN = 1; - ilon0 = 2; - ilonN = 3; - - latdim = dimsizes(ncg->$gridlatnm$(:,0)) - londim = dimsizes(gridlon(0,:)) - if ( any( ismissing(indx)) )then - print( "Indices:"+indx ); - print( "Missing indices found" ); - print( "nlat: "+latdim ); - print( "nlon: "+londim ); - print( "yc: "+ncg->$gridlatnm$(:,0) ); - print( "xc: "+gridlon(0,:) ); - status_exit(-1); - end if - - if ( debug .eq. True )then - print( "Indices:"+indx ); - end if - if ( printn .eq. True )then - print( "Full grid size: nlat = "+latdim+" nlon = "+londim ) - loclatdim = indx(ilatN) - indx(ilat0) + 1; - loclondim = indx(ilonN) - indx(ilon0) + 1; - print( "Grid size:"+loclatdim+"x"+loclondim ); - LOLAT = ncg->$gridlatnm$(indx(ilat0),indx(ilon0)); - HILAT = ncg->$gridlatnm$(indx(ilatN),indx(ilonN)); - print( "Actual grid span: Latitude="+LOLAT+"-"+HILAT ); - LOLON = gridlon(indx(ilat0),indx(ilon0)); - HILON = gridlon(indx(ilatN),indx(ilonN)); - print( "Actual grid span: Longitude="+LOLON+"-"+HILON ); - end if - - ; - ; Read in the list of files - ; - filelist = asciiread(filelistfil(0), (/ nfiles /), "string"); - regfilelist = asciiread(regfilelistfil(0), (/ nfiles /), "string"); - ; - ; Loop over each of the files to process... - ; - do i = 0, nfiles-1 - ; - ; Get the filename of the input global file and the output regional filename - ; - globalfile = filelist(i) - if ( systemfunc("test -f "+globalfile+"; echo $?" ) .ne. 0 )then - print( "Input global "+globalfile+" file does not exist or not found: "+globalfile ); - status_exit(-1); - end if - if ( debug .eq. True )then - print( "Process file: "+globalfile ); - end if - regfile = regfilelist(i) - if ( ismissing(regfile) )then - print( "Output regional filename was NOT found: "+regfile ); - status_exit(-1); - end if - - nc = addfile( globalfile, "r" ); - varnames = getfilevarnames( nc ); - filelonnm = "" - filelatnm = "" - nlat = 0 - nlon = 0 - do v = 0, dimsizes(varnames)-1 - dimnames = getfilevardims( nc, varnames(v) ); - dimlens = getfilevardimsizes( nc, varnames(v) ); - getfilecoord_namenlen( filelatnames, dimnames, dimlens, nlat, filelatnm ); - getfilecoord_namenlen( filelonnames, dimnames, dimlens, nlon, filelonnm ); - delete( dimnames ); - delete( dimlens ); - end do - if ( filelatnm .eq. "" )then - print( "Could not find a recognizable latitude dimension name" ) - status_exit(-1); - end if - if ( printn .eq. True )then - print( "nlat = "+nlat+" nlon = "+nlon ) - end if - ; - ; Check to make sure number of latitudes and longitudes are the same as on the domain file - ; - if ( (latdim .ne. nlat) .or. (londim .ne. 
nlon) )then - print( "Latitude or longitude dimensions do NOT match the grid file for file: "+globalfile ); - status_exit(-1); - end if - ; - ; Run ncks on it over the region of interest - ; - do v = 0, dimsizes(varnames)-1 - cmd = "ncks -O -d "+filelatnm+","+indx(ilat0)+","+indx(ilatN)+" -d "+filelonnm+","+indx(ilon0)+","+indx(ilonN); - cmd = cmd + " -v " + varnames(v) + " " + globalfile + " "+regfile+"_VAR"+varnames(v)+".nc" - print( "Execute:"+cmd ); - if ( debug .eq. False )then - if ( systemfunc( cmd+"; echo $?" ) .ne. 0 )then - print( "Command did not complete successfully: " ); - status_exit( -1 ) - end if - end if - cmd = "ncks -A "+regfile+"_VAR"+varnames(v)+".nc "+regfile - print( "Execute:"+cmd ); - if ( debug .eq. False )then - if ( systemfunc( cmd+"; echo $?" ) .ne. 0 )then - print( "Command did not complete successfully: " ); - status_exit( -1 ) - end if - system( "/bin/rm "+regfile+"_VAR"+varnames(v)+".nc" ) - end if - end do - delete( varnames ); - if ( debug .eq. False )then - ; - ; Open up resultant file for writing - ; - nco = addfile( regfile, "w" ); - nco@history = nco@history + ":"+ldate + ": "; - end if - end do - - print( "================================================================================================" ); - print( "Successfully created regional datasets from global datasets" ); - -end diff --git a/tools/site_and_regional/getregional_datasets.pl b/tools/site_and_regional/getregional_datasets.pl deleted file mode 100755 index 5fee1a1493..0000000000 --- a/tools/site_and_regional/getregional_datasets.pl +++ /dev/null @@ -1,375 +0,0 @@ -#!/usr/bin/env perl -#======================================================================= -# -# Extract out regional datasets from the global datasets. -# -# Usage: -# -# getregional_datasets.pl -# -# Erik Kluzek -# Aug/28/2009 -# -#======================================================================= - -use Cwd; -use strict; -#use diagnostics; -use English; -use Getopt::Long; -use IO::File; - -#----------------------------------------------------------------------------------------------- -# Set the directory that contains this scripts. If the command was issued using a -# relative or absolute path, that path is in $ProgDir. Otherwise assume the -# command was issued from the current working directory. - -(my $ProgName = $0) =~ s!(.*)/!!; # name of this script -my $ProgDir = $1; # name of directory containing this script -- may be a - # relative or absolute path, or null if the script is in - # the user's PATH -my $cmdline = "@ARGV"; # Command line arguments to script -my $cwd = getcwd(); # current working directory -my $scrdir; # absolute pathname of directory that contains this script -my $nm = "$ProgName::"; # name to use if script dies -if ($ProgDir) { - $scrdir = absolute_path($ProgDir); -} else { - $scrdir = $cwd; -} - -my $gridfilename = "fatmlndfrc"; - -#----------------------------------------------------------------------------------------------- - -sub usage { - die < 90.0) ) { - die <<"EOF"; -** $ProgName - Bad value for latitude (=$lat) for $desc ** -EOF - } - if ( ($lon < 0.) || ($lon > 360.0) ) { - die <<"EOF"; -** $ProgName - Bad value for longitude (=$lat) for $desc ** -EOF - } - return( $lat, $lon ); - -} - -#----------------------------------------------------------------------------------------------- - -# Process command-line options. 
- -my %opts = ( - SW_corner => undef, - NE_corner => undef, - infilelist => undef, - outfilelist => undef, - help => 0, - verbose => 0, - debug => 0, - ); -GetOptions( - "sw|SW_corner=s" => \$opts{'SW_corner'}, - "ne|NE_corner=s" => \$opts{'NE_corner'}, - "i|infilelist=s" => \$opts{'infilelist'}, - "o|outfilelist=s" => \$opts{'outfilelist'}, - "h|help" => \$opts{'help'}, - "d|debug" => \$opts{'debug'}, - "v|verbose" => \$opts{'verbose'}, -) or usage(); - -# Give usage message. -usage() if $opts{'help'}; - -# Check for unparsed arguments -if (@ARGV) { - print "ERROR: unrecognized arguments: @ARGV\n"; - usage(); -} - -if ( ! defined($opts{'infilelist'}) || ! defined($opts{'outfilelist'}) ) { - print "ERROR: MUST set both infilelist and outfilelist\n"; - usage(); -} -if ( ! defined($opts{'SW_corner'}) || ! defined($opts{'NE_corner'}) ) { - print "ERROR: MUST set both SW_corner and NE_corner\n"; - usage(); -} - -my ($S_lat,$W_lon) = get_latlon( $opts{'SW_corner'}, "SW" ); -my ($N_lat,$E_lon) = get_latlon( $opts{'NE_corner'}, "NE" ); - -if ( $N_lat <= $S_lat ) { - print "ERROR: NE corner latitude less than or equal to SW corner latitude\n"; - usage(); -} -if ( $E_lon <= $W_lon ) { - print "ERROR: NE corner longitude less than or equal to SW corner longitude\n"; - usage(); -} - -#----------------------------------------------------------------------------------------------- -my $debug; -if ( $opts{'debug'} ) { - $debug = "DEBUG=TRUE"; -} -my $print; -if ( $opts{'verbose'} ) { - $print = "PRINT=TRUE"; -} - -my %infiles = parse_filelist( $opts{'infilelist'} ); -my %outfiles = parse_filelist( $opts{'outfilelist'} ); - -(my $GRIDFILE, my $NFILES, my $INFILES, my $OUTFILES) = get_filelists( \%infiles, \%outfiles ); - -write_usermods( \%outfiles ); - -my $cmd = "env S_LAT=$S_lat W_LON=$W_lon N_LAT=$N_lat E_LON=$E_lon " . - "GRIDFILE=$GRIDFILE NFILES=$NFILES OUTFILELIST=$OUTFILES INFILELIST=$INFILES " . 
- "$debug $print ncl $scrdir/getregional_datasets.ncl"; - -print "Execute: $cmd\n"; -system( $cmd ); -system( "/bin/rm $INFILES $OUTFILES" ); - -#------------------------------------------------------------------------------- - -sub parse_filelist { -# -# Parse a list of files (in "filename = 'filepath'" format) into a hash -# - my $file = shift; - - # check that the file exists - (-f $file) or die "$nm: failed to find filelist file $file"; - my $fh = IO::File->new($file, '<') or die "$nm: can't open file: $file\n"; - - my %files = ( ); - my $valstring1 = '\'[^\']*\''; - my $valstring2 = '"[^"]*"'; - while( my $line = <$fh> ) { - if ( $line =~ m/^\s*(\S+)\s*=\s*($valstring1|$valstring2)$/ ) { - my $var = $1; - my $string = $2; - $string =~ s/'|"//g; - if ( exists($files{$var}) ) { - die "$nm: variable listed twice in file ($file): $var\n"; - } - $files{$var} = $string; - # Ignore empty lines or comments - } elsif ( ($line =~ m/^\s*$/) || ($line =~ m/^\s*!/) ) { - # ignore empty lines or comments - } else { - die "$nm: unexpected line in $file: $line\n"; - } - } - $fh->close; - - return( %files ); -} - -#------------------------------------------------------------------------------- - -sub get_filelists { -# -# Make sure file hashes compare correctly, and if so return in and out lists -# on files -# - my $infiles_ref = shift; - my $outfiles_ref = shift; - - my @infiles = sort( keys(%$infiles_ref ) ); - my @outfiles = sort( keys(%$outfiles_ref) ); - - if ( $#infiles != $#outfiles ) { - die "$nm: number of infiles is different from outfiles\n"; - } - if ( "@infiles" ne "@outfiles" ) { - die "$nm: list of infiles is different from outfiles list\n"; - } - my $infilelist = "infilelist_getregional_datasets___tmp.lst"; - my $outfilelist = "outfilelist_getregional_datasets___tmp.lst"; - my $fhin = IO::File->new($infilelist, '>') or die "$nm: can't open file: $infilelist\n"; - my $fhout = IO::File->new($outfilelist, '>') or die "$nm: can't open file: $outfilelist\n"; - - my $nfiles = 0; - foreach my $file ( @infiles ) { - my $infile = $$infiles_ref{$file}; - if ( ! -f "$infile" ) { - die "$nm: infile ($file) $infile does NOT exist!\n"; - } - print $fhin "$infile\n"; - my $outfile = $$outfiles_ref{$file}; - if ( -f "$outfile" ) { - die "$nm: outfile ($file) $outfile already exists, delete it if you want to overwrite!\n"; - } - print $fhout "$outfile\n"; - $nfiles++; - } - $fhin->close(); - $fhout->close(); - my $var = $gridfilename; - my $gridfile = ""; - if ( exists($$infiles_ref{$var}) ) { - $gridfile = $$infiles_ref{$var}; - } else { - die "$nm: the grid file ($var) is required to be on the lists!\n"; - } - - return( $gridfile, $nfiles, $infilelist, $outfilelist ); -} - -#------------------------------------------------------------------------------- - -sub write_usermods { -# -# Write the user_nl_clm and xmlchng_cmnds files out -# These can be used to setup a case after getregional_datasets is run. 
-# - my $outfiles_ref = shift; - - my $cwd = getcwd(); # current working directory - - # - # Write out the user_nl_clm file - # - my $usrnlfile = "user_nl_clm"; - my $fh = IO::File->new($usrnlfile, '>') or die "$nm: can't open file: $usrnlfile\n"; - - my $outgridfile = undef; - foreach my $file ( sort(keys(%$outfiles_ref)) ) { - my $filepath = $$outfiles_ref{$file}; - # Add current directory on front of path if not an absolute path in filepath - if ( $filepath !~ m/^\// ) { - $filepath = "$cwd/$filepath"; - } - # Write all filenames out besides the gridfilename - if ( $file ne $gridfilename ) { - print $fh "$file = '$filepath'\n"; - } else { - $outgridfile = $filepath; - } - } - $fh->close(); - # - # Write out the xmlchnge_cmnds file - # - (my $filename = $outgridfile)=~ s!(.*)/!!; - my $filedir = $1; - my $cmndsfile = "xmlchange_cmnds"; - my $fh = IO::File->new($cmndsfile, '>') or die "$nm: can't open file: $cmndsfile\n"; - print $fh "./xmlchange ATM_DOMAIN_PATH=$filedir\n"; - print $fh "./xmlchange LND_DOMAIN_PATH=$filedir\n"; - print $fh "./xmlchange ATM_DOMAIN_FILE=$filename\n"; - print $fh "./xmlchange LND_DOMAIN_FILE=$filename\n"; - $fh->close(); -} - -#------------------------------------------------------------------------------- - -sub absolute_path { -# -# Convert a pathname into an absolute pathname, expanding any . or .. characters. -# Assumes pathnames refer to a local filesystem. -# Assumes the directory separator is "/". -# - my $path = shift; - my $cwd = getcwd(); # current working directory - my $abspath; # resulting absolute pathname - -# Strip off any leading or trailing whitespace. (This pattern won't match if -# there's embedded whitespace. - $path =~ s!^\s*(\S*)\s*$!$1!; - -# Convert relative to absolute path. - - if ($path =~ m!^\.$!) { # path is "." - return $cwd; - } elsif ($path =~ m!^\./!) { # path starts with "./" - $path =~ s!^\.!$cwd!; - } elsif ($path =~ m!^\.\.$!) { # path is ".." - $path = "$cwd/.."; - } elsif ($path =~ m!^\.\./!) { # path starts with "../" - $path = "$cwd/$path"; - } elsif ($path =~ m!^[^/]!) { # path starts with non-slash character - $path = "$cwd/$path"; - } - - my ($dir, @dirs2); - my @dirs = split "/", $path, -1; # The -1 prevents split from stripping trailing nulls - # This enables correct processing of the input "/". - - # Remove any "" that are not leading. - for (my $i=0; $i<=$#dirs; ++$i) { - if ($i == 0 or $dirs[$i] ne "") { - push @dirs2, $dirs[$i]; - } - } - @dirs = (); - - # Remove any "." - foreach $dir (@dirs2) { - unless ($dir eq ".") { - push @dirs, $dir; - } - } - @dirs2 = (); - - # Remove the "subdir/.." parts. - foreach $dir (@dirs) { - if ( $dir !~ /^\.\.$/ ) { - push @dirs2, $dir; - } else { - pop @dirs2; # remove previous dir when current dir is .. - } - } - if ($#dirs2 == 0 and $dirs2[0] eq "") { return "/"; } - $abspath = join '/', @dirs2; - return( $abspath ); -} - -#------------------------------------------------------------------------------- - diff --git a/tools/site_and_regional/sample_inlist b/tools/site_and_regional/sample_inlist deleted file mode 100644 index 0b13271540..0000000000 --- a/tools/site_and_regional/sample_inlist +++ /dev/null @@ -1,18 +0,0 @@ -! -! Sample input file of global datasets to extract a region from. Only works -! on standard 2D CLM files such as: fatmlndfrc, fsurdat, flanduse_timeseries, -! stream_fldfilename_ndep, stream_fldfilename_lightng, and/or -! stream_fldfilename_popdens. -! -! format: list of filenames similar to namelist format with a single line -! 
per file with the format of: -! -! variable = "filepath" -! -! Lines (such as these) with "!" are ignored. -! - -! fatmlndfrc is REQUIRED! MUST BE ON THE SAME GRID AS ALL OF THE DATAFILES BELOW!!!! - - fatmlndfrc = '/glade/p/cesmdata/cseg/inputdata/share/domains/domain.clm/domain.lnd.0.125x0.125_tx0.1v2.140704.nc' - fsurdat = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/surfdata_map/surfdata_0.125x0.125_simyr2000_c150114.nc' diff --git a/tools/site_and_regional/sample_inlist_0.5popd b/tools/site_and_regional/sample_inlist_0.5popd deleted file mode 100644 index 8d4480fff4..0000000000 --- a/tools/site_and_regional/sample_inlist_0.5popd +++ /dev/null @@ -1,22 +0,0 @@ -! -! Sample input file of global datasets to extract a region from. Only works -! on standard 2D CLM files such as: fatmlndfrc, fsurdat, flanduse_timeseries, -! stream_fldfilename_ndep, stream_fldfilename_lightng, and/or -! stream_fldfilename_popdens. -! -! format: list of filenames similar to namelist format with a single line -! per file with the format of: -! -! variable = "filepath" -! -! Lines (such as these) with "!" are ignored. -! - -! fatmlndfrc is REQUIRED! MUST BE ON THE SAME GRID AS ALL OF THE DATAFILES BELOW!!!! - - fatmlndfrc = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/firedata/clmforc.Li_2012_hdm_0.5x0.5_AVHRR_simyr1850-2010_c130401.nc' -! -! The following files are interpolated by default so technically do NOT need -! to be extracted, but it will help performance. -! - stream_fldfilename_popdens = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/firedata/clmforc.Li_2012_hdm_0.5x0.5_AVHRR_simyr1850-2010_c130401.nc' diff --git a/tools/site_and_regional/sample_inlist_T62 b/tools/site_and_regional/sample_inlist_T62 deleted file mode 100644 index ac5ee2ed8d..0000000000 --- a/tools/site_and_regional/sample_inlist_T62 +++ /dev/null @@ -1,23 +0,0 @@ -! -! Sample input file of global datasets to extract a region from. Only works -! on standard 2D CLM files such as: fatmlndfrc, fsurdat, flanduse_timeseries, -! stream_fldfilename_ndep, stream_fldfilename_lightng, and/or -! stream_fldfilename_popdens. -! -! format: list of filenames similar to namelist format with a single line -! per file with the format of: -! -! variable = "filepath" -! -! Lines (such as these) with "!" are ignored. -! - -! fatmlndfrc is REQUIRED! MUST BE ON THE SAME GRID AS ALL OF THE DATAFILES BELOW!!!! - - fatmlndfrc = '/glade/p/cesmdata/cseg/inputdata/atm/datm7/atm_forcing.datm7.Qian.T62.c080727/domain.T62.050609.nc' -! -! The following files are interpolated by default so technically do NOT need -! to be extracted, but it will help performance. -! - stream_fldfilename_lightng = '/glade/p/cesmdata/cseg/inputdata/atm/datm7/NASA_LIS/clmforc.Li_2012_climo1995-2011.T62.lnfm_c130327.nc' - strm_datfil = '/glade/p/cesmdata/cseg/inputdata/atm/datm7/atm_forcing.datm7.Qian.T62.c080727/Solar6Hrly/clmforc.Qian.c2006.T62.Solr.1948-01.nc' diff --git a/tools/site_and_regional/sample_inlist_ndep b/tools/site_and_regional/sample_inlist_ndep deleted file mode 100644 index 726c7ffbaf..0000000000 --- a/tools/site_and_regional/sample_inlist_ndep +++ /dev/null @@ -1,22 +0,0 @@ -! -! Sample input file of global datasets to extract a region from. Only works -! on standard 2D CLM files such as: fatmlndfrc, fsurdat, flanduse_timeseries, -! stream_fldfilename_ndep, stream_fldfilename_lightng, and/or -! stream_fldfilename_popdens. -! -! format: list of filenames similar to namelist format with a single line -! per file with the format of: -! -! variable = "filepath" -! -! 
Lines (such as these) with "!" are ignored. -! - -! fatmlndfrc is REQUIRED! MUST BE ON THE SAME GRID AS ALL OF THE DATAFILES BELOW!!!! - - fatmlndfrc = '/glade/p/cesmdata/cseg/inputdata/share/domains/domain.lnd.fv1.9x2.5_gx1v6.090206.nc' -! -! The following files are interpolated by default so technically do NOT need -! to be extracted, but it will help performance. -! - stream_fldfilename_ndep = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/ndepdata/fndep_clm_hist_simyr1849-2006_1.9x2.5_c100428.nc' diff --git a/tools/site_and_regional/sample_outlist b/tools/site_and_regional/sample_outlist deleted file mode 100644 index fd153e7206..0000000000 --- a/tools/site_and_regional/sample_outlist +++ /dev/null @@ -1,14 +0,0 @@ -! -! Sample input file of regional datasets that will be created. You need to have -! the same list of files as in the input filelist as well. See the sample_inlist -! for the list of files that can be operated on. -! -! format: list of filenames similar to namelist format with a single line -! per file with the format of: -! -! variable = "filepath" -! -! Lines (such as these) with "!" are ignored. -! - fatmlndfrc = 'domain.lnd.184x256pt_0.125x0.125_alaskaUSA_tx0.1v2_c150114.nc' - fsurdat = 'surfdata_184x256pt_0.125x0.125_alaskaUSA_simyr2000_c150114.nc' diff --git a/tools/site_and_regional/sample_outlist_0.5popd b/tools/site_and_regional/sample_outlist_0.5popd deleted file mode 100644 index 671a55037d..0000000000 --- a/tools/site_and_regional/sample_outlist_0.5popd +++ /dev/null @@ -1,14 +0,0 @@ -! -! Sample input file of regional datasets that will be created. You need to have -! the same list of files as in the input filelist as well. See the sample_inlist -! for the list of files that can be operated on. -! -! format: list of filenames similar to namelist format with a single line -! per file with the format of: -! -! variable = "filepath" -! -! Lines (such as these) with "!" are ignored. -! - fatmlndfrc = 'domain.lnd.0.5x0.5_alaskaUSA_gx1v6_c141117.nc' -stream_fldfilename_popdens = 'clmforc.Li_2012_hdm_0.5x0.5_AVHRR_simyr1850-2010_c141117.nc' diff --git a/tools/site_and_regional/sample_outlist_T62 b/tools/site_and_regional/sample_outlist_T62 deleted file mode 100644 index 3dfe69148a..0000000000 --- a/tools/site_and_regional/sample_outlist_T62 +++ /dev/null @@ -1,16 +0,0 @@ -! -! Sample input file of regional datasets that will be created. You need to have -! the same list of files as in the input filelist as well. See the sample_inlist -! for the list of files that can be operated on. -! -! format: list of filenames similar to namelist format with a single line -! per file with the format of: -! -! variable = "filepath" -! -! Lines (such as these) with "!" are ignored. -! - fatmlndfrc = 'domain.lnd.T62_alaskaUSA_c141117.nc' - - stream_fldfilename_lightng = 'clmforc.Li_2012_climo1995-2011.T62_alaskaUSA.lnfm_c141117.nc' - strm_datfil = 'clmforc.Qian.c2006.T62_alaskaUSA.Solr.1948-01.nc' diff --git a/tools/site_and_regional/sample_outlist_ndep b/tools/site_and_regional/sample_outlist_ndep deleted file mode 100644 index d3dad24ae7..0000000000 --- a/tools/site_and_regional/sample_outlist_ndep +++ /dev/null @@ -1,15 +0,0 @@ -! -! Sample input file of regional datasets that will be created. You need to have -! the same list of files as in the input filelist as well. See the sample_inlist -! for the list of files that can be operated on. -! -! format: list of filenames similar to namelist format with a single line -! per file with the format of: -! -! variable = "filepath" -! -! 
Lines (such as these) with "!" are ignored. -! - fatmlndfrc = 'domain.lnd.13x12pt_f19_alaskaUSA_gx1v6_c141117.nc' - - stream_fldfilename_ndep = 'fndep_clm_hist_simyr1849-2006_13x12pt_f19_alaskaUSA_c141117.nc' From 612e35a31348e112e89ff005005456e4e786125d Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Sun, 9 Jan 2022 20:51:34 -0700 Subject: [PATCH 107/223] Remove the getregional_datasets scripts and the testing for it as well as the documentation about them --- test/tools/TSMscript_tools.sh | 3 --- test/tools/input_tests_master | 9 --------- test/tools/tests_posttag_nompi_regression | 5 ----- test/tools/tests_pretag_cheyenne_nompi | 1 - tools/site_and_regional/README | 14 +------------- 5 files changed, 1 insertion(+), 31 deletions(-) diff --git a/test/tools/TSMscript_tools.sh b/test/tools/TSMscript_tools.sh index a191cebe1f..943fec97f2 100755 --- a/test/tools/TSMscript_tools.sh +++ b/test/tools/TSMscript_tools.sh @@ -43,9 +43,6 @@ if [ $? -ne 0 ]; then fi cd ${rundir} -# Copy any sample files so can use them -cp $cfgdir/sample_* $rundir - optfile=${3%^*} cfgfile=${3#*^} diff --git a/test/tools/input_tests_master b/test/tools/input_tests_master index fd0d7efd6e..257bef8d0a 100644 --- a/test/tools/input_tests_master +++ b/test/tools/input_tests_master @@ -48,12 +48,3 @@ smi59 TSMscript_tools.sh mkmapdata mkmapdata.sh mkmapdata_if10 bli59 TBLscript_tools.sh mkmapdata mkmapdata.sh mkmapdata_if10 smi79 TSMscript_tools.sh mkmapdata mkmapdata.sh mkmapdata_i1x1_brazil bli79 TBLscript_tools.sh mkmapdata mkmapdata.sh mkmapdata_i1x1_brazil - -smiS4 TSMscript_tools.sh site_and_regional getregional_datasets.pl getregional -bliS4 TBLscript_tools.sh site_and_regional getregional_datasets.pl getregional -smiS8 TSMscript_tools.sh site_and_regional getregional_datasets.pl getregional_ndep -bliS8 TBLscript_tools.sh site_and_regional getregional_datasets.pl getregional_ndep -smiS9 TSMscript_tools.sh site_and_regional getregional_datasets.pl getregional_T62 -bliS9 TBLscript_tools.sh site_and_regional getregional_datasets.pl getregional_T62 -smiS0 TSMscript_tools.sh site_and_regional getregional_datasets.pl getregional_05popd -bliS0 TBLscript_tools.sh site_and_regional getregional_datasets.pl getregional_05popd diff --git a/test/tools/tests_posttag_nompi_regression b/test/tools/tests_posttag_nompi_regression index 5b5d76fd60..1395aebe11 100644 --- a/test/tools/tests_posttag_nompi_regression +++ b/test/tools/tests_posttag_nompi_regression @@ -9,8 +9,3 @@ smi74 bli74 smi78 bli78 smiT4 bliT4 smiT2 bliT2 -smiS4 bliS4 -smiS8 bliS8 -smiS9 bliS9 -smiS0 bliS0 -smiS0 bliS0 diff --git a/test/tools/tests_pretag_cheyenne_nompi b/test/tools/tests_pretag_cheyenne_nompi index fec9d08448..8075eab50b 100644 --- a/test/tools/tests_pretag_cheyenne_nompi +++ b/test/tools/tests_pretag_cheyenne_nompi @@ -11,7 +11,6 @@ smi64 bli64 smi54 bli54 smi57 bli57 smi58 bli58 -smiS4 bliS4 smi74 bli74 smiT4 bliT4 smiT2 bliT2 diff --git a/tools/site_and_regional/README b/tools/site_and_regional/README index ca7507d7d6..7ab81b370e 100644 --- a/tools/site_and_regional/README +++ b/tools/site_and_regional/README @@ -10,7 +10,7 @@ ncar_pylib Brief description of scripts: -subset_data.py +subset_data create regional domain, surface data, and rtm directional files by extracting data from global datasets @@ -27,20 +27,8 @@ mknoocnmap.pl Script to create unity mapping dataset for single-point or regional studies over land-only (no ocean). 
-getregional_datasets.pl - Extract out regional datasets from global ones and put files in a location that - can be used by build-namelist. - NCL Scripts available: mkunitymap.ncl NCL script to create a unity map -- ran by above script (mknoocnmap.pl) -getregional_datasets.ncl - NCL script to extract out regional datasets. - -Input datafiles: - -sample_infile* --- Sample datafiles with list of files for getregional_datasets.pl to operate on -sample_outfile* -- Sample datafiles with list of files for getregional_datasets.pl to create - From 632a500d1fd376d47edcbb5792089774fb1fe182 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sun, 9 Jan 2022 22:05:30 -0700 Subject: [PATCH 108/223] update more comments. --- python/ctsm/site_and_regional/base_case.py | 3 - .../ctsm/site_and_regional/regional_case.py | 16 ++--- .../site_and_regional/single_point_case.py | 40 +++++------ python/ctsm/subset_data.py | 70 ++++++++++--------- 4 files changed, 65 insertions(+), 64 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 272af59a67..27637a0a26 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -50,9 +50,6 @@ class BaseCase: ------- create_1d_coord(filename, lon_varname , lat_varname,x_dim , y_dim ) create 1d coordinate variables to enable sel() method - add_tag_to_filename(filename, tag) - add a tag and timetag to a filename ending with - [._]cYYMMDD.nc or [._]YYMMDD.nc update_metadata(nc) Class method for adding some new attributes (such as date, username) and remove the old attributes from the netcdf file. diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 107c2ab4df..84206dc283 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -74,7 +74,7 @@ def __init__( create_landuse, create_datm, create_user_mods, - output_dir, + out_dir, ): """ Initializes RegionalCase with the given arguments. 
@@ -86,7 +86,7 @@ def __init__( self.lon1 = lon1 self.lon2 = lon2 self.reg_name = reg_name - self.output_dir = output_dir + self.out_dir = out_dir self.create_tag() def create_tag(self): @@ -110,7 +110,7 @@ def create_domain_at_reg(self, indir, file): fdomain_in = os.path.join(indir, file) fdomain_out = add_tag_to_filename(fdomain_in, self.tag) logger.info("fdomain_in: %s", fdomain_in) - logger.info("fdomain_out: %s", os.path.join(self.output_dir, fdomain_out)) + logger.info("fdomain_out: %s", os.path.join(self.out_dir, fdomain_out)) logger.info("Creating domain file at region: %s", self.tag) # create 1d coordinate variables to enable sel() method @@ -128,7 +128,7 @@ def create_domain_at_reg(self, indir, file): f_out.attrs["Created_from"] = fdomain_in # mode 'w' overwrites file - wfile = os.path.join(self.output_dir, fdomain_out) + wfile = os.path.join(self.out_dir, fdomain_out) f_out.to_netcdf(path=wfile, mode="w", format='NETCDF3_64BIT') logger.info("Successfully created file (fdomain_out) %s", wfile) f_in.close() @@ -145,7 +145,7 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir): fsurf_in = os.path.join(indir, file) fsurf_out = add_tag_to_filename(fsurf_in, self.tag) logger.info("fsurf_in: %s", fsurf_in) - logger.info("fsurf_out: %s", os.path.join(self.output_dir, fsurf_out)) + logger.info("fsurf_out: %s", os.path.join(self.out_dir, fsurf_out)) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fsurf_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") @@ -162,7 +162,7 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir): f_out.attrs["Created_from"] = fsurf_in # mode 'w' overwrites file - wfile = os.path.join(self.output_dir, fsurf_out) + wfile = os.path.join(self.out_dir, fsurf_out) f_out.to_netcdf(path=wfile, mode="w", format='NETCDF3_64BIT') logger.info("created file (fsurf_out) %s", wfile) f_in.close() @@ -185,7 +185,7 @@ def create_landuse_at_reg(self, indir, file, user_mods_dir): fluse_in = os.path.join(indir, file) fluse_out = add_tag_to_filename(fluse_in, self.tag) logger.info("fluse_in: %s", fluse_in) - logger.info("fluse_out: %s", os.path.join(self.output_dir, fluse_out)) + logger.info("fluse_out: %s", os.path.join(self.out_dir, fluse_out)) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord( @@ -204,7 +204,7 @@ def create_landuse_at_reg(self, indir, file, user_mods_dir): f_out.attrs["Created_from"] = fluse_in # mode 'w' overwrites file - wfile = os.path.join(self.output_dir, fluse_out) + wfile = os.path.join(self.out_dir, fluse_out) f_out.to_netcdf(path=wfile, mode="w", format='NETCDF3_64BIT') logger.info("Successfully created file (fluse_out) %s", wfile) f_in.close() diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 951883335f..04bd11ee22 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -44,7 +44,7 @@ class SinglePointCase(BaseCase): flag for creating user mods directories and files overwrite_single_pft : bool flag to overwrite the whole grid 100% single PFT. 
- dominant_pft : int + dom_pft : int dominant pft type for this single point zero_nonveg_landunits : bool flag for setting all non-vegetation landunits to zero @@ -92,11 +92,11 @@ def __init__( create_datm, create_user_mods, overwrite_single_pft, - dominant_pft, + dom_pft, include_nonveg, - uniform_snowpack, + uni_snow, cap_saturation, - output_dir, + out_dir, ): super().__init__(create_domain, create_surfdata, create_landuse, create_datm, create_user_mods) @@ -104,11 +104,11 @@ def __init__( self.plon = plon self.site_name = site_name self.overwrite_single_pft = overwrite_single_pft - self.dominant_pft = dominant_pft + self.dom_pft = dom_pft self.include_nonveg = include_nonveg - self.uniform_snowpack = uniform_snowpack + self.uni_snow = uni_snow self.cap_saturation = cap_saturation - self.output_dir = output_dir + self.out_dir = out_dir self.create_tag() def create_tag(self): @@ -132,7 +132,7 @@ def create_domain_at_point(self, indir, file): fdomain_in = os.path.join(indir, file) fdomain_out = add_tag_to_filename(fdomain_in, self.tag) logger.info("fdomain_in: %s", fdomain_in) - logger.info("fdomain_out: %s", os.path.join(self.output_dir, fdomain_out)) + logger.info("fdomain_out: %s", os.path.join(self.out_dir, fdomain_out)) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fdomain_in, "xc", "yc", "ni", "nj") @@ -147,7 +147,7 @@ def create_domain_at_point(self, indir, file): self.update_metadata(f_out) f_out.attrs["Created_from"] = fdomain_in - wfile = os.path.join(self.output_dir, fdomain_out) + wfile = os.path.join(self.out_dir, fdomain_out) f_out.to_netcdf(path=wfile, mode="w", format="NETCDF3_64BIT") logger.info("Successfully created file (fdomain_out) %s", wfile) f_in.close() @@ -164,7 +164,7 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): fluse_in = os.path.join(indir, file) fluse_out = add_tag_to_filename(fluse_in, self.tag) logger.info("fluse_in: %s", fluse_in) - logger.info("fluse_out: %s", os.path.join(self.output_dir, fluse_out)) + logger.info("fluse_out: %s", os.path.join(self.out_dir, fluse_out)) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord( @@ -191,7 +191,7 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): self.update_metadata(f_out) f_out.attrs["Created_from"] = fluse_in - wfile = os.path.join(self.output_dir, fluse_out) + wfile = os.path.join(self.out_dir, fluse_out) # mode 'w' overwrites file f_out.to_netcdf(path=wfile, mode="w", format="NETCDF3_64BIT") logger.info("Successfully created file (fluse_out), %s", wfile) @@ -216,7 +216,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): fsurf_in = os.path.join(indir, file) fsurf_out = add_tag_to_filename(fsurf_in, self.tag) logger.info("fsurf_in: %s", fsurf_in) - logger.info("fsurf_out: %s", os.path.join(self.output_dir, fsurf_out)) + logger.info("fsurf_out: %s", os.path.join(self.out_dir, fsurf_out)) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fsurf_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") @@ -231,8 +231,8 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): # modify surface data properties if self.overwrite_single_pft: f_out["PCT_NAT_PFT"][:, :, :] = 0 - if self.dominant_pft < 16: - f_out['PCT_NAT_PFT'][:, :, self.dominant_pft] = 100 + if self.dom_pft < 16: + f_out['PCT_NAT_PFT'][:, :, self.dom_pft] = 100 if not self.include_nonveg: f_out["PCT_NATVEG"][:, :] = 100 f_out["PCT_CROP"][:, :] = 0 @@ -240,7 +240,7 @@ def create_surfdata_at_point(self, 
indir, file, user_mods_dir): f_out["PCT_WETLAND"][:, :] = 0.0 f_out["PCT_URBAN"][:, :, ] = 0.0 f_out["PCT_GLACIER"][:, :] = 0.0 - if self.uniform_snowpack: + if self.uni_snow: f_out["STD_ELEV"][:, :] = 20.0 if self.cap_saturation: f_out["FMAX"][:, :] = 0.0 @@ -273,7 +273,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): self.update_metadata(f_out) f_out.attrs["Created_from"] = fsurf_in # mode 'w' overwrites file - wfile = os.path.join(self.output_dir, fsurf_out) + wfile = os.path.join(self.out_dir, fsurf_out) f_out.to_netcdf(path=wfile, mode="w", format="NETCDF3_64BIT") logger.info("Successfully created file (fsurf_out) %s", wfile) f_in.close() @@ -298,7 +298,7 @@ def create_datmdomain_at_point(self, datm_tuple: DatmFiles): datm_file = add_tag_to_filename(fdatmdomain_in, self.tag) fdatmdomain_out = os.path.join(datm_tuple.outdir, datm_file) logger.info("fdatmdomain_in: %s", fdatmdomain_in) - logger.info("fdatmdomain out: %s", os.path.join(self.output_dir, fdatmdomain_out)) + logger.info("fdatmdomain out: %s", os.path.join(self.out_dir, fdatmdomain_out)) # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fdatmdomain_in, "xc", "yc", "ni", "nj") @@ -314,7 +314,7 @@ def create_datmdomain_at_point(self, datm_tuple: DatmFiles): f_out.attrs["Created_from"] = fdatmdomain_in # mode 'w' overwrites file - wfile = os.path.join(self.output_dir, fdatmdomain_out) + wfile = os.path.join(self.out_dir, fdatmdomain_out) f_out.to_netcdf(path=wfile, mode="w", format = 'NETCDF3_64BIT') logger.info("Successfully created file (fdatmdomain_out) : %s", wfile) f_in.close() @@ -353,7 +353,7 @@ def write_shell_commands(self, file): # write_to_file surrounds text with newlines with open(file, 'w') as nl_file: self.write_to_file("# Change below line if you move the subset data directory", nl_file) - self.write_to_file("./xmlchange {}={}".format(USRDAT_DIR, self.output_dir), nl_file) + self.write_to_file("./xmlchange {}={}".format(USRDAT_DIR, self.out_dir), nl_file) self.write_to_file("./xmlchange PTS_LON={}".format(str(self.plon)), nl_file) self.write_to_file("./xmlchange PTS_LAT={}".format(str(self.plat)), nl_file) self.write_to_file("./xmlchange MPILIB=mpi-serial", nl_file) @@ -403,7 +403,7 @@ def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_s "{}{}.nc".format(datm_tuple.tag_tpqw, dtag)) ftpqw2 = "{}{}.{}.nc".format(datm_tuple.tag_tpqw, self.tag, dtag) - outdir = os.path.join(self.output_dir, datm_tuple.outdir) + outdir = os.path.join(self.out_dir, datm_tuple.outdir) infile += [fsolar, fprecip, ftpqw] outfile += [os.path.join(outdir, fsolar2), os.path.join(outdir, fprecip2), diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 71c66e44ea..a2c0ac567e 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -68,6 +68,7 @@ from ctsm.site_and_regional.regional_case import RegionalCase from ctsm.args_utils import plon_type, plat_type from ctsm.path_utils import path_to_ctsm_root +from ctsm.utils import abort # -- import ctsm logging flags from ctsm.ctsm_logging import ( @@ -414,20 +415,20 @@ def subset_point(args, file_dict: dict): # -- Create SinglePoint Object single_point = SinglePointCase( - args.plat, - args.plon, - args.site_name, - args.create_domain, - args.create_surfdata, - args.create_landuse, - args.create_datm, - args.create_user_mods, - args.overwrite_single_pft, - args.dom_pft, - args.include_nonveg, - args.uni_snow, - args.cap_saturation, - args.out_dir, + plat = args.plat, + plon = 
args.plon,
+        site_name = args.site_name,
+        create_domain = args.create_domain,
+        create_surfdata = args.create_surfdata,
+        create_landuse = args.create_landuse,
+        create_datm = args.create_datm,
+        create_user_mods = args.create_user_mods,
+        overwrite_single_pft = args.overwrite_single_pft,
+        dom_pft = args.dom_pft,
+        include_nonveg = args.include_nonveg,
+        uni_snow = args.uni_snow,
+        cap_saturation = args.cap_saturation,
+        out_dir = args.out_dir,
     )
 
     logger.debug(single_point)
@@ -473,17 +474,17 @@ def subset_region(args, file_dict: dict):
 
     # -- Create Region Object
     region = RegionalCase(
-        args.lat1,
-        args.lat2,
-        args.lon1,
-        args.lon2,
-        args.reg_name,
-        args.create_domain,
-        args.create_surfdata,
-        args.create_landuse,
-        args.create_datm,
-        args.create_user_mods,
-        args.out_dir,
+        lat1 = args.lat1,
+        lat2 = args.lat2,
+        lon1 = args.lon1,
+        lon2 = args.lon2,
+        reg_name = args.reg_name,
+        create_domain = args.create_domain,
+        create_surfdata = args.create_surfdata,
+        create_landuse = args.create_landuse,
+        create_datm = args.create_datm,
+        create_user_mods = args.create_user_mods,
+        out_dir = args.out_dir,
     )
 
     logger.debug(region)
@@ -503,7 +504,6 @@ def subset_region(args, file_dict: dict):
                         args.user_mods_dir)
 
     logger.info("Successfully ran script for a regional case.")
-    sys.exit()
 
 
 def main():
@@ -534,14 +534,18 @@ def main():
 
     # print help and exit when no option is chosen
     if args.run_type != "point" and args.run_type != "region":
-        print("Must supply a positional argument: 'point' or 'region'.")
-        print("See ./subset_data --help for more help.")
-        sys.exit()
+        err_msg = (
+            "Must supply a positional argument: 'point' or 'region'. \n"
+            "See ./subset_data --help for more help."
+        )
+        abort(err_msg)
     if not any([args.create_surfdata, args.create_domain, args.create_landuse, args.create_datm]):
-        print("Must supply one of:")
-        print("  --create-surface \n  --create-landuse \n  --create-datm \n  --create-domain")
-        print("See ./subset_data --help for more help.")
-        sys.exit()
+        err_msg = (
+            "Must supply one of: \n"
+            " --create-surface \n --create-landuse \n --create-datm \n --create-domain \n "
+            "See ./subset_data --help for more help."
+        )
+        abort(err_msg)
 
     # create files and folders necessary and return dictionary of file/folder locations
     file_dict = setup_files(args, defaults, cesmroot)

From 2b20576cc72a24f202d8518aaf96892c7558e9e7 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Sun, 9 Jan 2022 22:07:37 -0700
Subject: [PATCH 109/223] updates regarding config_vals.
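This commit moves lon_range_0_to_360 and the config-file helpers out of
ctsm/utils.py; modify_fsurdat and its unit test now import
lon_range_0_to_360 from ctsm.config_utils instead. A minimal usage
sketch (illustrative only; it assumes ctsm/python is on sys.path, and
the numeric behavior is taken from the function body relocated below):

    from ctsm.config_utils import lon_range_0_to_360

    lon_range_0_to_360(-105.0)  # -105.0 lies in [-180, 0), so it is shifted by +360 to 255.0
    lon_range_0_to_360(255.0)   # values already in [0, 360] are returned unchanged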
--- python/ctsm/modify_fsurdat/modify_fsurdat.py | 3 +- python/ctsm/test/test_unit_modify_fsurdat.py | 2 +- python/ctsm/utils.py | 73 -------------------- 3 files changed, 3 insertions(+), 75 deletions(-) diff --git a/python/ctsm/modify_fsurdat/modify_fsurdat.py b/python/ctsm/modify_fsurdat/modify_fsurdat.py index 245ff887ae..13f07e60c0 100644 --- a/python/ctsm/modify_fsurdat/modify_fsurdat.py +++ b/python/ctsm/modify_fsurdat/modify_fsurdat.py @@ -12,7 +12,8 @@ import xarray as xr from ctsm.git_utils import get_ctsm_git_short_hash -from ctsm.utils import abort, update_metadata, lon_range_0_to_360 +from ctsm.utils import abort, update_metadata +from ctsm.config_utils import lon_range_0_to_360 logger = logging.getLogger(__name__) diff --git a/python/ctsm/test/test_unit_modify_fsurdat.py b/python/ctsm/test/test_unit_modify_fsurdat.py index 19c53dac6a..4914924518 100755 --- a/python/ctsm/test/test_unit_modify_fsurdat.py +++ b/python/ctsm/test/test_unit_modify_fsurdat.py @@ -10,7 +10,7 @@ import xarray as xr from ctsm import unit_testing -from ctsm.utils import lon_range_0_to_360 +from ctsm.config_utils import lon_range_0_to_360 from ctsm.modify_fsurdat.modify_fsurdat import ModifyFsurdat # Allow test names that pylint doesn't like; otherwise hard to make them diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 24ecb8db2b..ac9d567761 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -136,79 +136,6 @@ def update_metadata(file, title, summary, contact, data_script, description): del file.attrs[attr] -def lon_range_0_to_360(lon_in): - """ - Description - ----------- - Restrict longitude to 0 to 360 when given as -180 to 180. - """ - if -180 <= lon_in < 0: - lon_out = lon_in + 360 - logger.info( - "Resetting longitude from %s to %s to keep in the range " " 0 to 360", - str(lon_in), - str(lon_out), - ) - elif 0 <= lon_in <= 360 or lon_in is None: - lon_out = lon_in - else: - errmsg = "lon_in needs to be in the range 0 to 360" - abort(errmsg) - - return lon_out - - -def get_config_value( - config, - section, - item, - file_path, - allowed_values=None, - default=None, - is_list=False, - convert_to_type=None, - can_be_unset=False, -): - """Get a given item from a given section of the config object - Give a helpful error message if we can't find the given section or item - Note that the file_path argument is only used for the sake of the error message - If allowed_values is present, it should be a list of strings giving allowed values - The function _handle_config_value determines what to do if we read: - - a list or - - a str that needs to be converted to int / float / bool - - _CONFIG_UNSET: anything with the value "UNSET" will become "None" - """ - try: - val = config.get(section, item) - except NoSectionError: - abort( - "ERROR: Config file {} must contain section '{}'".format(file_path, section) - ) - except NoOptionError: - abort( - "ERROR: Config file {} must contain item '{}' in section '{}'".format( - file_path, item, section - ) - ) - - if val == _CONFIG_PLACEHOLDER: - abort( - "Error: {} needs to be specified in config file {}".format(item, file_path) - ) - - val = _handle_config_value( - var=val, - default=default, - item=item, - is_list=is_list, - convert_to_type=convert_to_type, - can_be_unset=can_be_unset, - allowed_values=allowed_values, - ) - - return val - - def _handle_config_value( var, default, item, is_list, convert_to_type, can_be_unset, allowed_values ): From d6a26f31e2ba34b6087d1782d00d39b19d6b361d Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Sun, 
9 Jan 2022 22:43:21 -0700
Subject: [PATCH 110/223] sorting glob.glob for #1437.

---
 tools/site_and_regional/modify_singlept_site_neon.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/site_and_regional/modify_singlept_site_neon.py b/tools/site_and_regional/modify_singlept_site_neon.py
index d3db55126b..2e5890ae81 100755
--- a/tools/site_and_regional/modify_singlept_site_neon.py
+++ b/tools/site_and_regional/modify_singlept_site_neon.py
@@ -253,7 +253,7 @@ def find_surffile(surf_dir, site_name):
     # sf_name = "surfdata_hist_16pfts_Irrig_CMIP6_simyr2000_"+site_name+"*.nc"
     sf_name = "surfdata_hist_78pfts_CMIP6_simyr2000_" + site_name + "*.nc"
     # surf_file = glob.glob(os.path.join(surf_dir,sf_name))
-    surf_file = glob.glob(surf_dir + "/" + sf_name)
+    surf_file = sorted(glob.glob(surf_dir + "/" + sf_name))
 
     if len(surf_file) > 1:
         print("The following files found :", *surf_file, sep="\n- ")

From d2dc19cb39bdc8d71a9b93a54cbb354921bb9c49 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Mon, 10 Jan 2022 00:18:09 -0700
Subject: [PATCH 111/223] improving the download_data

---
 .../modify_singlept_site_neon.py | 21 ++++++++++++-------
 1 file changed, 13 insertions(+), 8 deletions(-)

diff --git a/tools/site_and_regional/modify_singlept_site_neon.py b/tools/site_and_regional/modify_singlept_site_neon.py
index 2e5890ae81..1024da4a6b 100755
--- a/tools/site_and_regional/modify_singlept_site_neon.py
+++ b/tools/site_and_regional/modify_singlept_site_neon.py
@@ -457,16 +457,21 @@ def download_file(url, fname):
         fname (str) :
             file name to save the downloaded file.
     """
-    response = requests.get(url)
+    try:
+        response = requests.get(url)
 
-    with open(fname, "wb") as f:
-        f.write(response.content)
+        with open(fname, "wb") as f:
+            f.write(response.content)
 
-    # -- Check if download status_code
-    if response.status_code == 200:
-        print("Download finished successfully for", fname, ".")
-    elif response.status_code == 404:
-        print("File " + fname + "was not available on the neon server:" + url)
+        # -- Check if download status_code
+        if response.status_code == 200:
+            print("Download finished successfully for", fname, ".")
+        elif response.status_code == 404:
+            print("File " + fname + " was not available on the neon server: " + url)
+    except Exception as err:
+        print ('The server could not fulfill the request.')
+        print ('Something went wrong in downloading', fname)
+        print ('Error:', err)

From 0407486a10c693b025aa2e15644207a312d2fd6c Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Mon, 10 Jan 2022 00:20:56 -0700
Subject: [PATCH 112/223] improving the download_data for #1594

---
 tools/site_and_regional/run_neon.py | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/tools/site_and_regional/run_neon.py b/tools/site_and_regional/run_neon.py
index 020bc2e8ee..a9d31b08e9 100755
--- a/tools/site_and_regional/run_neon.py
+++ b/tools/site_and_regional/run_neon.py
@@ -608,6 +608,32 @@ def parse_neon_listing(listing_file, valid_neon_sites):
 
     return available_list
 
 
+def download_file(url, fname):
+    """
+    Function to download a file.
+    Args:
+        url (str):
+            url of the file for downloading
+        fname (str) :
+            file name to save the downloaded file.
+    """
+    try:
+        response = requests.get(url)
+
+        with open(fname, "wb") as f:
+            f.write(response.content)
+
+        # -- Check if download status_code
+        if response.status_code == 200:
+            print("Download finished successfully for", fname, ".")
+        elif response.status_code == 404:
+            print("File " + fname + " was not available on the neon server: " + url)
+    except Exception as err:
+        print ('The server could not fulfill the request.')
+        print ('Something went wrong in downloading', fname)
+        print ('Error:', err)
+
+
 def download_file(url, fname):
     """
     Function to download a file.

From 1754666ea0c84a648728e5ec8797756a7dd57dc1 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Mon, 10 Jan 2022 16:25:42 -0700
Subject: [PATCH 113/223] removing overwrite_single_pft

---
 python/ctsm/site_and_regional/single_point_case.py | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py
index 04bd11ee22..a8502abb37 100644
--- a/python/ctsm/site_and_regional/single_point_case.py
+++ b/python/ctsm/site_and_regional/single_point_case.py
@@ -42,13 +42,11 @@ class SinglePointCase(BaseCase):
         flag for creating DATM files
     create_user_mods : bool
         flag for creating user mods directories and files
-    overwrite_single_pft : bool
-        flag to overwrite the whole grid 100% single PFT.
     dom_pft : int
-        dominant pft type for this single point
+        dominant pft type for this single point (None if not specified)
     zero_nonveg_landunits : bool
         flag for setting all non-vegetation landunits to zero
-    overwrite_single_pft : bool
+    uni_snow : bool
         flag for creating datasets using uniform snowpack
     saturation_excess : bool
         flag for making dataset using saturation excess
@@ -91,7 +89,6 @@ def __init__(
         create_landuse,
         create_datm,
         create_user_mods,
-        overwrite_single_pft,
         dom_pft,
         include_nonveg,
         uni_snow,
@@ -103,7 +100,6 @@ def __init__(
         self.plat = plat
         self.plon = plon
         self.site_name = site_name
-        self.overwrite_single_pft = overwrite_single_pft
         self.dom_pft = dom_pft
         self.include_nonveg = include_nonveg
         self.uni_snow = uni_snow
@@ -229,7 +225,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir):
 
         # modify surface data properties
-        if self.overwrite_single_pft:
+        if self.dom_pft is not None:
             f_out["PCT_NAT_PFT"][:, :, :] = 0
             if self.dom_pft < 16:
                 f_out['PCT_NAT_PFT'][:, :, self.dom_pft] = 100

From 7770c9e05bfcf3ad81e780980d8c8abaf2bf8409 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Tue, 18 Jan 2022 11:39:07 -0700
Subject: [PATCH 114/223] removing overwrite

---
 python/ctsm/subset_data.py | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py
index a2c0ac567e..6ec7de3d41 100644
--- a/python/ctsm/subset_data.py
+++ b/python/ctsm/subset_data.py
@@ -136,13 +136,6 @@ def get_parser():
         dest="uni_snow",
         required=False,
     )
-    pt_parser.add_argument(
-        "--overwrite-to-single-pft",
-        help="Modify surface dataset to be 100%% one single PFT set by --dompft.",
-        action="store_true",
-        dest="overwrite_single_pft",
-        required=False,
-    )
     pt_parser.add_argument(
         "--include-nonveg",
         help="Do not zero non-vegetation land units in the surface data.",
@@ -163,7 +156,7 @@ def get_parser():
         action="store",
         dest="dom_pft",
         type=int,
-        default=7,
+        default=None,
     )
     pt_parser.add_argument(
         "--datm-from-tower",
@@ -423,7 +416,6 @@ def subset_point(args, file_dict: dict):
         create_landuse = args.create_landuse,
         create_datm = args.create_datm,
create_user_mods = args.create_user_mods,
-        overwrite_single_pft = args.overwrite_single_pft,
         dom_pft = args.dom_pft,
         include_nonveg = args.include_nonveg,
         uni_snow = args.uni_snow,

From b6ee37710825be8646687790e3a64d0ab58355e1 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Tue, 18 Jan 2022 12:25:39 -0700
Subject: [PATCH 115/223] fixing message when no args provided.

---
 python/ctsm/subset_data.py | 43 +++++++++++++++++++++++---------------
 1 file changed, 26 insertions(+), 17 deletions(-)

diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py
index 6ec7de3d41..9db5c6a9ab 100644
--- a/python/ctsm/subset_data.py
+++ b/python/ctsm/subset_data.py
@@ -504,41 +504,50 @@ def main():
     single point.
     """
 
+    # --------------------------------- #
     # add logging flags from ctsm_logging
     setup_logging_pre_config()
    parser = get_parser()
     args = parser.parse_args()
+
+    # --------------------------------- #
+    # print help and exit when no option is chosen
+    if args.run_type != "point" and args.run_type != "region":
+        err_msg = textwrap.dedent('''\
+                \n ------------------------------------
+                \n Must supply a positional argument: 'point' or 'region'.
+                \n See ./subset_data --help for more help.
+                '''
+                )
+        parser.error(err_msg)
+
+    if not any([args.create_surfdata, args.create_domain, args.create_landuse, args.create_datm]):
+        err_msg = textwrap.dedent('''\
+                \n ------------------------------------
+                \n Must supply one of:
+                \n --create-surface \n --create-landuse \n --create-datm \n --create-domain \n
+                \n See ./subset_data --help for more help.
+                '''
+                )
+        parser.error(err_msg)
+
+    # --------------------------------- #
+    # process logging args (i.e. debug and verbose)
     process_logging_args(args)
 
+    # --------------------------------- #
     # parse defaults file
     cesmroot = path_to_ctsm_root()
     defaults = configparser.ConfigParser()
     defaults.read(os.path.join(cesmroot, "tools/site_and_regional", DEFAULTS_FILE))
 
     # --------------------------------- #
-
     myname = getuser()
     pwd = os.getcwd()
     logger.info("User = %s", myname)
     logger.info("Current directory = %s", pwd)
 
     # --------------------------------- #
-
-    # print help and exit when no option is chosen
-    if args.run_type != "point" and args.run_type != "region":
-        err_msg = (
-            "Must supply a positional argument: 'point' or 'region'. \n"
-            "See ./subset_data --help for more help."
-        )
-        abort(err_msg)
-    if not any([args.create_surfdata, args.create_domain, args.create_landuse, args.create_datm]):
-        err_msg = (
-            "Must supply one of: \n"
-            " --create-surface \n --create-landuse \n --create-datm \n --create-domain \n "
-            "See ./subset_data --help for more help."
-        )
-        abort(err_msg)
-
     # create files and folders necessary and return dictionary of file/folder locations
     file_dict = setup_files(args, defaults, cesmroot)

From ca2e2977f8c56d2e1abbaa6dc9038f19236e5ccd Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Tue, 18 Jan 2022 14:06:14 -0700
Subject: [PATCH 116/223] ability to overwrite when dom_pft is higher than 16.
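With crop (78-pft) surface datasets a dominant PFT index of 16 or higher
is legitimate, so the hard-coded check "if self.dom_pft < 16" is
replaced by a comparison against a new num_pft attribute (78 when the
crop flag is set, 16 otherwise). A rough sketch of the intended logic,
with illustrative names standing in for the create_surfdata_at_point
and determine_num_pft changes below:

    num_pft = 78 if crop_flag else 16       # cf. determine_num_pft()
    if dom_pft is not None:
        pct_nat_pft[:, :, :] = 0            # zero out every PFT percentage first
        if dom_pft < num_pft:
            pct_nat_pft[:, :, dom_pft] = 100  # then hand the full gridcell to dom_pft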
--- .../site_and_regional/single_point_case.py | 16 ++++++++- python/ctsm/subset_data.py | 35 ++++++++++++++----- 2 files changed, 41 insertions(+), 10 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index a8502abb37..6eac0c4390 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -44,6 +44,8 @@ class SinglePointCase(BaseCase): flag for creating user mods directories and files dom_pft : int dominant pft type for this single point (None if not specified) + num_pft : int + total number of pfts for surface dataset (if crop 78 pft, else 16 pft) zero_nonveg_landunits : bool flag for setting all non-vegetation landunits to zero uni_snow : bool @@ -90,6 +92,7 @@ def __init__( create_datm, create_user_mods, dom_pft, + num_pft, include_nonveg, uni_snow, cap_saturation, @@ -101,12 +104,15 @@ def __init__( self.plon = plon self.site_name = site_name self.dom_pft = dom_pft + self.num_pft = num_pft self.include_nonveg = include_nonveg self.uni_snow = uni_snow self.cap_saturation = cap_saturation self.out_dir = out_dir self.create_tag() + self.check_dom_pft () + def create_tag(self): """ Create a tag for single point which is the site name @@ -117,6 +123,14 @@ def create_tag(self): else: self.tag = "{}_{}".format(str(self.plon), str(self.plat)) + def check_dom_pft (self): + """ + A function to compare dom_pft and num_pft + """ + if self.dom_pft: + print (type(self.dom_pft)) + + def create_domain_at_point(self, indir, file): """ Create domain file for this SinglePointCase class. @@ -227,7 +241,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): # modify surface data properties if self.dom_pft is not None: f_out["PCT_NAT_PFT"][:, :, :] = 0 - if self.dom_pft < 16: + if self.dom_pft < self.num_pft: f_out['PCT_NAT_PFT'][:, :, self.dom_pft] = 100 if not self.include_nonveg: f_out["PCT_NATVEG"][:, :] = 100 diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 9db5c6a9ab..5a2bd6394d 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -338,6 +338,25 @@ def setup_user_mods(user_mods_dir, cesmroot): for line in base_file: user_file.write(line) +def determine_num_pft (crop): + """ + A simple function to determine the number of pfts. 
+ + Args: + crop (bool): crop flag denoting if we are using crop + + Raises: + + Returns: + num_pft (int) : number of pfts for surface dataset + """ + if crop: + num_pft = "78" + else: + num_pft = "16" + logger.debug("crop_flag = %s => num_pft = %s", crop.__str__(), num_pft) + return num_pft + def setup_files(args, defaults, cesmroot): """ @@ -363,15 +382,10 @@ def setup_files(args, defaults, cesmroot): logger.info("dir_output_datm: %s", os.path.join(args.out_dir, dir_output_datm)) # if the crop flag is on - we need to use a different land use and surface data file - if args.crop_flag: - num_pft = "78" - fsurf_in = defaults.get("surfdat", "surfdat_78pft") - fluse_in = defaults.get("landuse", "landuse_78pft") - else: - num_pft = "16" - fsurf_in = defaults.get("surfdat", "surfdat_16pft") - fluse_in = defaults.get("landuse", "landuse_16pft") - logger.debug("crop_flag = %s => num_pft = %s", args.crop_flag.__str__(), num_pft) + num_pft = determine_num_pft(args.crop_flag) + + fsurf_in = defaults.get("surfdat", "surfdat_"+num_pft+"pft") + fluse_in = defaults.get("landuse", "landuse_"+num_pft+"pft") file_dict = {'main_dir': defaults.get("main", "clmforcingindir"), 'fdomain_in': defaults.get("domain", "file"), @@ -406,6 +420,8 @@ def subset_point(args, file_dict: dict): logger.info("----------------------------------------------------------------------------") logger.info("This script extracts a single point from the global CTSM datasets.") + num_pft = int(determine_num_pft(args.crop_flag)) + # -- Create SinglePoint Object single_point = SinglePointCase( plat = args.plat, @@ -417,6 +433,7 @@ def subset_point(args, file_dict: dict): create_datm = args.create_datm, create_user_mods = args.create_user_mods, dom_pft = args.dom_pft, + num_pft = num_pft, include_nonveg = args.include_nonveg, uni_snow = args.uni_snow, cap_saturation = args.cap_saturation, From b606af246857ea354431f0a9914efb8311c75807 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 25 Jan 2022 13:00:38 -0700 Subject: [PATCH 117/223] modify_fsurdat: Add dom_cft as alternative user-choice to dom_nat_pft --- .../ctsm/modify_fsurdat/fsurdat_modifier.py | 10 +++ python/ctsm/modify_fsurdat/modify_fsurdat.py | 73 ++++++++++++++++--- tools/modify_fsurdat/modify_template.cfg | 3 + 3 files changed, 75 insertions(+), 11 deletions(-) diff --git a/python/ctsm/modify_fsurdat/fsurdat_modifier.py b/python/ctsm/modify_fsurdat/fsurdat_modifier.py index 76b2374a05..39fc9f12a5 100644 --- a/python/ctsm/modify_fsurdat/fsurdat_modifier.py +++ b/python/ctsm/modify_fsurdat/fsurdat_modifier.py @@ -70,6 +70,10 @@ def fsurdat_modifier(cfg_path): item='dom_nat_pft', file_path=cfg_path, allowed_values=range(15), # integers from 0 to 14 convert_to_type=int, can_be_unset=True) + dom_cft = get_config_value(config=config, section=section, + item='dom_cft', file_path=cfg_path, + allowed_values=range(15, 79), # integers from 15 to 78 + convert_to_type=int, can_be_unset=True) lai = get_config_value(config=config, section=section, item='lai', file_path=cfg_path, is_list=True, @@ -132,6 +136,12 @@ def fsurdat_modifier(cfg_path): if zero_nonveg: modify_fsurdat.zero_nonveg() + # The set_dom_cft call follows zero_nonveg because it modifies PCT_NATVEG + # and PCT_CROP in the user-defined rectangle + if dom_cft is not None and dom_nat_pft is None: + modify_fsurdat.set_dom_cft(dom_cft=dom_cft, + lai=lai, sai=sai, + hgt_top=hgt_top, hgt_bot=hgt_bot) # ---------------------------------------------- # Output the now modified CTSM surface data file # 
---------------------------------------------- diff --git a/python/ctsm/modify_fsurdat/modify_fsurdat.py b/python/ctsm/modify_fsurdat/modify_fsurdat.py index bf1a5e8c9b..3f9e319950 100644 --- a/python/ctsm/modify_fsurdat/modify_fsurdat.py +++ b/python/ctsm/modify_fsurdat/modify_fsurdat.py @@ -25,7 +25,7 @@ def __init__(self, my_data, lon_1, lon_2, lat_1, lat_2, landmask_file): self.file = my_data - self.not_rectangle = self._get_not_rectangle( + self.rectangle = self._get_rectangle( lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, longxy=self.file.LONGXY, latixy=self.file.LATIXY) @@ -34,8 +34,9 @@ def __init__(self, my_data, lon_1, lon_2, lat_1, lat_2, landmask_file): # overwrite self.not_rectangle with data from # user-specified .nc file in the .cfg file self._landmask_file = xr.open_dataset(landmask_file) - rectangle = self._landmask_file.landmask - self.not_rectangle = np.logical_not(rectangle) + self.rectangle = self._landmask_file.landmask + + self.not_rectangle = np.logical_not(self.rectangle) @classmethod @@ -47,7 +48,7 @@ def init_from_file(cls, fsurdat_in, lon_1, lon_2, lat_1, lat_2, landmask_file): @staticmethod - def _get_not_rectangle(lon_1, lon_2, lat_1, lat_2, longxy, latixy): + def _get_rectangle(lon_1, lon_2, lat_1, lat_2, longxy, latixy): """ Description ----------- @@ -86,7 +87,7 @@ def _get_not_rectangle(lon_1, lon_2, lat_1, lat_2, longxy, latixy): rectangle = np.logical_and(union_1, union_2) not_rectangle = np.logical_not(rectangle) - return not_rectangle + return rectangle def write_output(self, fsurdat_in, fsurdat_out): @@ -165,26 +166,76 @@ def set_dom_nat_pft(self, dom_nat_pft, lai, sai, hgt_top, hgt_bot): 'MONTHLY_HEIGHT_BOT': hgt_bot} for var, val in vars_4d.items(): if val is not None: - self.set_lai_sai_hgts(dom_nat_pft=dom_nat_pft, + self.set_lai_sai_hgts(dom_plant=dom_nat_pft, + var=var, val=val) + + + def set_dom_cft(self, dom_cft, lai, sai, hgt_top, hgt_bot): + """ + Description + ----------- + In rectangle selected by user (or default -90 to 90 and 0 to 360), + replace fsurdat file's PCT_CFT with: + - 100 for dom_cft selected by user + - 0 for all other PFTs/CFTs + If user has specified lai, sai, hgt_top, hgt_bot, replace these with + values selected by the user for dom_cft + + Arguments + --------- + dom_cft: + (int) User's entry of CFT to be set to 100% everywhere + lai: + (float) User's entry of MONTHLY_LAI for their dom_cft + sai: + (float) User's entry of MONTHLY_SAI for their dom_cft + hgt_top: + (float) User's entry of MONTHLY_HEIGHT_TOP for their dom_cft + hgt_bot: + (float) User's entry of MONTHLY_HEIGHT_BOT for their dom_cft + """ + + # Add PCT_NATVEG to PCT_CROP in the rectangle; remove from PCT_NATVEG + self.file['PCT_CROP'] = \ + self.file['PCT_CROP'] + \ + self.file['PCT_NATVEG'].where(self.rectangle, other=0) + self.setvar_lev0('PCT_NATVEG', 0) + + for cft in self.file.cft: + cft_local = cft - (max(self.file.natpft) + 1) + # initialize 3D variable; set outside the loop below + self.setvar_lev1('PCT_CFT', val=0, lev1_dim=cft_local) + + # set 3D variable + self.setvar_lev1('PCT_CFT', val=100, lev1_dim=dom_cft-(max(self.file.natpft)+1)) + + # dictionary of 4d variables to loop over + vars_4d = {'MONTHLY_LAI': lai, + 'MONTHLY_SAI': sai, + 'MONTHLY_HEIGHT_TOP': hgt_top, + 'MONTHLY_HEIGHT_BOT': hgt_bot} + for var, val in vars_4d.items(): + if val is not None: + self.set_lai_sai_hgts(dom_plant=dom_cft, var=var, val=val) - def set_lai_sai_hgts(self, dom_nat_pft, var, val): + def set_lai_sai_hgts(self, dom_plant, var, val): """ Description 
----------- If user has specified lai, sai, hgt_top, hgt_bot, replace these with - values selected by the user for dom_nat_pft. Else do nothing. + values selected by the user for dom_plant. Else do nothing. """ - if dom_nat_pft == 0: # bare soil: var must equal 0 + if dom_plant == 0: # bare soil: var must equal 0 val = [0] * 12 if len(val) != 12: errmsg = 'Error: Variable should have exactly 12 ' \ 'entries in the configure file: ' + var abort(errmsg) for mon in self.file.time - 1: # loop over 12 months - # set 4D variable to value for dom_nat_pft - self.setvar_lev2(var, val[int(mon)], lev1_dim=dom_nat_pft, + # set 4D variable to value for dom_plant + self.setvar_lev2(var, val[int(mon)], lev1_dim=dom_plant, lev2_dim=mon) diff --git a/tools/modify_fsurdat/modify_template.cfg b/tools/modify_fsurdat/modify_template.cfg index 56e8221635..e5efa41ced 100644 --- a/tools/modify_fsurdat/modify_template.cfg +++ b/tools/modify_fsurdat/modify_template.cfg @@ -60,6 +60,9 @@ landmask_file = UNSET # If idealized = True and dom_nat_pft = UNSET, the latter defaults to 0 # (bare soil). Valid values 0 to 14 (int). dom_nat_pft = UNSET +# Crop (CFT) to be set to 100% according to user-defined mask. +# If dom_nat_pft >= 0, dom_cft defaults to UNSET. Valid values 15 to 78 (int). +dom_cft = UNSET # LAI, SAI, HEIGHT_TOP, and HEIGHT_BOT values by month for dom_nat_pft # If dom_nat_pft = 0, the next four default to 0 (space-delimited list From 7290331f6b3a8acd6fb02b724e831658f934922e Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Tue, 25 Jan 2022 15:04:09 -0700 Subject: [PATCH 118/223] subset_data accepts pct_pft. --- python/ctsm/subset_data.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 5a2bd6394d..c00ad40ccf 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -152,11 +152,21 @@ def get_parser(): ) pt_parser.add_argument( "--dompft", - help="Dominant PFT if we set the grid to 100%% one PFT [default: %(default)s].", + help="Dominant PFT(s): if we set the grid to 100%% one or multiple PFTs [default: %(default)s].", action="store", dest="dom_pft", type=int, default=None, + nargs='*', + ) + pt_parser.add_argument( + "--pctpft", + help="Percetages of each pft (set by --dompft) on the land unit.", + action="store", + dest="pct_pft", + type=float, + default=None, + nargs='*', ) pt_parser.add_argument( "--datm-from-tower", @@ -433,6 +443,7 @@ def subset_point(args, file_dict: dict): create_datm = args.create_datm, create_user_mods = args.create_user_mods, dom_pft = args.dom_pft, + pct_pft = args.pct_pft, num_pft = num_pft, include_nonveg = args.include_nonveg, uni_snow = args.uni_snow, From 00c8ba9d360aa1a257e2f8b1ad0cd10f0cfe1885 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 25 Jan 2022 18:58:36 -0700 Subject: [PATCH 119/223] Revisions in response to Erik's code review - Removed a bunch of "magic" numbers - Consolidated two options (dom_nat_pft and dom_cft) into one (dom_plant) - Not in Erik's review, but I added some new logger.info lines to indicate the progress of a run when using --verbose --- .../ctsm/modify_fsurdat/fsurdat_modifier.py | 48 ++++---- python/ctsm/modify_fsurdat/modify_fsurdat.py | 110 ++++++------------ tools/modify_fsurdat/fsurdat_modifier | 1 + tools/modify_fsurdat/modify_template.cfg | 15 +-- 4 files changed, 69 insertions(+), 105 deletions(-) diff --git a/python/ctsm/modify_fsurdat/fsurdat_modifier.py b/python/ctsm/modify_fsurdat/fsurdat_modifier.py index 
39fc9f12a5..46365999fc 100644 --- a/python/ctsm/modify_fsurdat/fsurdat_modifier.py +++ b/python/ctsm/modify_fsurdat/fsurdat_modifier.py @@ -65,14 +65,15 @@ def fsurdat_modifier(cfg_path): landmask_file = get_config_value(config=config, section=section, item='landmask_file', file_path=cfg_path, can_be_unset=True) + # Create ModifyFsurdat object + modify_fsurdat = ModifyFsurdat.init_from_file(fsurdat_in, + lnd_lon_1, lnd_lon_2, lnd_lat_1, lnd_lat_2, landmask_file) + # not required: user may set these in the .cfg file - dom_nat_pft = get_config_value(config=config, section=section, - item='dom_nat_pft', file_path=cfg_path, - allowed_values=range(15), # integers from 0 to 14 - convert_to_type=int, can_be_unset=True) - dom_cft = get_config_value(config=config, section=section, - item='dom_cft', file_path=cfg_path, - allowed_values=range(15, 79), # integers from 15 to 78 + max_pft = int(max(modify_fsurdat.file.lsmpft)) + dom_plant = get_config_value(config=config, section=section, + item='dom_plant', file_path=cfg_path, + allowed_values=range(max_pft + 1), # integers from 0 to max_pft convert_to_type=int, can_be_unset=True) lai = get_config_value(config=config, section=section, item='lai', @@ -88,9 +89,10 @@ def fsurdat_modifier(cfg_path): item='hgt_bot', file_path=cfg_path, is_list=True, convert_to_type=float, can_be_unset=True) + max_soil_color = int(modify_fsurdat.file.mxsoil_color) soil_color = get_config_value(config=config, section=section, item='soil_color', file_path=cfg_path, - allowed_values=range(1, 21), # integers from 1 to 20 + allowed_values=range(1, max_soil_color + 1), # 1 to max_soil_color convert_to_type=int, can_be_unset=True) std_elev = get_config_value(config=config, section=section, @@ -100,10 +102,6 @@ def fsurdat_modifier(cfg_path): item='max_sat_area', file_path=cfg_path, convert_to_type=float, can_be_unset=True) - # Create ModifyFsurdat object - modify_fsurdat = ModifyFsurdat.init_from_file(fsurdat_in, - lnd_lon_1, lnd_lon_2, lnd_lat_1, lnd_lat_2, landmask_file) - # ------------------------------ # modify surface data properties # ------------------------------ @@ -116,32 +114,34 @@ def fsurdat_modifier(cfg_path): if idealized: modify_fsurdat.set_idealized() # set 2D variables # set 3D and 4D variables pertaining to natural vegetation - modify_fsurdat.set_dom_nat_pft(dom_nat_pft=0, lai=[], sai=[], - hgt_top=[], hgt_bot=[]) - - if dom_nat_pft is not None: # overwrite "idealized" value - modify_fsurdat.set_dom_nat_pft(dom_nat_pft=dom_nat_pft, - lai=lai, sai=sai, - hgt_top=hgt_top, hgt_bot=hgt_bot) + modify_fsurdat.set_dom_plant(dom_plant=0, lai=[], sai=[], + hgt_top=[], hgt_bot=[]) + logger.info('idealized complete') if max_sat_area is not None: # overwrite "idealized" value modify_fsurdat.setvar_lev0('FMAX', max_sat_area) + logger.info('max_sat_area complete') if std_elev is not None: # overwrite "idealized" value modify_fsurdat.setvar_lev0('STD_ELEV', std_elev) + logger.info('std_elev complete') if soil_color is not None: # overwrite "idealized" value modify_fsurdat.setvar_lev0('SOIL_COLOR', soil_color) + logger.info('soil_color complete') if zero_nonveg: modify_fsurdat.zero_nonveg() + logger.info('zero_nonveg complete') - # The set_dom_cft call follows zero_nonveg because it modifies PCT_NATVEG + # The set_dom_plant call follows zero_nonveg because it modifies PCT_NATVEG # and PCT_CROP in the user-defined rectangle - if dom_cft is not None and dom_nat_pft is None: - modify_fsurdat.set_dom_cft(dom_cft=dom_cft, - lai=lai, sai=sai, - hgt_top=hgt_top, hgt_bot=hgt_bot) + if 
dom_plant is not None: + modify_fsurdat.set_dom_plant(dom_plant=dom_plant, + lai=lai, sai=sai, + hgt_top=hgt_top, hgt_bot=hgt_bot) + logger.info('dom_plant complete') + # ---------------------------------------------- # Output the now modified CTSM surface data file # ---------------------------------------------- diff --git a/python/ctsm/modify_fsurdat/modify_fsurdat.py b/python/ctsm/modify_fsurdat/modify_fsurdat.py index 3f9e319950..4197fb5aaf 100644 --- a/python/ctsm/modify_fsurdat/modify_fsurdat.py +++ b/python/ctsm/modify_fsurdat/modify_fsurdat.py @@ -42,7 +42,7 @@ def __init__(self, my_data, lon_1, lon_2, lat_1, lat_2, landmask_file): @classmethod def init_from_file(cls, fsurdat_in, lon_1, lon_2, lat_1, lat_2, landmask_file): """Initialize a ModifyFsurdat object from file fsurdat_in""" - logger.info( 'Opening fsurdat_in file to be modified: %s', fsurdat_in) + logger.info('Opening fsurdat_in file to be modified: %s', fsurdat_in) my_file = xr.open_dataset(fsurdat_in) return cls(my_file, lon_1, lon_2, lat_1, lat_2, landmask_file) @@ -128,86 +128,52 @@ def write_output(self, fsurdat_in, fsurdat_out): self.file.close() - def set_dom_nat_pft(self, dom_nat_pft, lai, sai, hgt_top, hgt_bot): + def set_dom_plant(self, dom_plant, lai, sai, hgt_top, hgt_bot): """ Description ----------- In rectangle selected by user (or default -90 to 90 and 0 to 360), - replace fsurdat file's PCT_NAT_PFT with: - - 100 for dom_nat_pft selected by user - - 0 for all other non-crop PFTs - If user has specified lai, sai, hgt_top, hgt_bot, replace these with - values selected by the user for dom_nat_pft - - Arguments - --------- - dom_nat_pft: - (int) User's entry of PFT to be set to 100% everywhere - lai: - (float) User's entry of MONTHLY_LAI for their dom_nat_pft - sai: - (float) User's entry of MONTHLY_SAI for their dom_nat_pft - hgt_top: - (float) User's entry of MONTHLY_HEIGHT_TOP for their dom_nat_pft - hgt_bot: - (float) User's entry of MONTHLY_HEIGHT_BOT for their dom_nat_pft - """ - - for pft in self.file.natpft: - # initialize 3D variable; set outside the loop below - self.setvar_lev1('PCT_NAT_PFT', val=0, lev1_dim=pft) - # set 3D variable value for dom_nat_pft - self.setvar_lev1('PCT_NAT_PFT', val=100, lev1_dim=dom_nat_pft) - - # dictionary of 4d variables to loop over - vars_4d = {'MONTHLY_LAI': lai, - 'MONTHLY_SAI': sai, - 'MONTHLY_HEIGHT_TOP': hgt_top, - 'MONTHLY_HEIGHT_BOT': hgt_bot} - for var, val in vars_4d.items(): - if val is not None: - self.set_lai_sai_hgts(dom_plant=dom_nat_pft, - var=var, val=val) - - - def set_dom_cft(self, dom_cft, lai, sai, hgt_top, hgt_bot): - """ - Description - ----------- - In rectangle selected by user (or default -90 to 90 and 0 to 360), - replace fsurdat file's PCT_CFT with: - - 100 for dom_cft selected by user + replace fsurdat file's PCT_NAT_PFT or PCT_CFT with: + - 100 for dom_plant selected by user - 0 for all other PFTs/CFTs If user has specified lai, sai, hgt_top, hgt_bot, replace these with - values selected by the user for dom_cft + values selected by the user for dom_plant Arguments --------- - dom_cft: - (int) User's entry of CFT to be set to 100% everywhere + dom_plant: + (int) User's entry of PFT/CFT to be set to 100% everywhere lai: - (float) User's entry of MONTHLY_LAI for their dom_cft + (float) User's entry of MONTHLY_LAI for their dom_plant sai: - (float) User's entry of MONTHLY_SAI for their dom_cft + (float) User's entry of MONTHLY_SAI for their dom_plant hgt_top: - (float) User's entry of MONTHLY_HEIGHT_TOP for their dom_cft + (float) User's entry 
of MONTHLY_HEIGHT_TOP for their dom_plant hgt_bot: - (float) User's entry of MONTHLY_HEIGHT_BOT for their dom_cft + (float) User's entry of MONTHLY_HEIGHT_BOT for their dom_plant """ - # Add PCT_NATVEG to PCT_CROP in the rectangle; remove from PCT_NATVEG - self.file['PCT_CROP'] = \ - self.file['PCT_CROP'] + \ - self.file['PCT_NATVEG'].where(self.rectangle, other=0) - self.setvar_lev0('PCT_NATVEG', 0) - - for cft in self.file.cft: - cft_local = cft - (max(self.file.natpft) + 1) - # initialize 3D variable; set outside the loop below - self.setvar_lev1('PCT_CFT', val=0, lev1_dim=cft_local) - - # set 3D variable - self.setvar_lev1('PCT_CFT', val=100, lev1_dim=dom_cft-(max(self.file.natpft)+1)) + # If dom_plant is a cft, add PCT_NATVEG to PCT_CROP in the rectangle + # and remove same from PCT_NATVEG, i.e. set PCT_NATVEG = 0. + if dom_plant > max(self.file.natpft): # dom_plant is a cft (crop) + self.file['PCT_CROP'] = \ + self.file['PCT_CROP'] + \ + self.file['PCT_NATVEG'].where(self.rectangle, other=0) + self.setvar_lev0('PCT_NATVEG', 0) + + for cft in self.file.cft: + cft_local = cft - (max(self.file.natpft) + 1) + # initialize 3D variable; set outside the loop below + self.setvar_lev1('PCT_CFT', val=0, lev1_dim=cft_local) + + # set 3D variable + self.setvar_lev1('PCT_CFT', val=100, lev1_dim=dom_plant-(max(self.file.natpft)+1)) + else: # dom_plant is a pft (not a crop) + for pft in self.file.natpft: + # initialize 3D variable; set outside the loop below + self.setvar_lev1('PCT_NAT_PFT', val=0, lev1_dim=pft) + # set 3D variable value for dom_plant + self.setvar_lev1('PCT_NAT_PFT', val=100, lev1_dim=dom_plant) # dictionary of 4d variables to loop over vars_4d = {'MONTHLY_LAI': lai, @@ -216,8 +182,7 @@ def set_dom_cft(self, dom_cft, lai, sai, hgt_top, hgt_bot): 'MONTHLY_HEIGHT_BOT': hgt_bot} for var, val in vars_4d.items(): if val is not None: - self.set_lai_sai_hgts(dom_plant=dom_cft, - var=var, val=val) + self.set_lai_sai_hgts(dom_plant=dom_plant, var=var, val=val) def set_lai_sai_hgts(self, dom_plant, var, val): @@ -227,11 +192,12 @@ def set_lai_sai_hgts(self, dom_plant, var, val): If user has specified lai, sai, hgt_top, hgt_bot, replace these with values selected by the user for dom_plant. Else do nothing. """ + months = int(max(self.file.time)) # 12 months if dom_plant == 0: # bare soil: var must equal 0 - val = [0] * 12 - if len(val) != 12: - errmsg = 'Error: Variable should have exactly 12 ' \ - 'entries in the configure file: ' + var + val = [0] * months + if len(val) != months: + errmsg = 'Error: Variable should have exactly ' + months + \ + ' entries in the configure file: ' + var abort(errmsg) for mon in self.file.time - 1: # loop over 12 months # set 4D variable to value for dom_plant diff --git a/tools/modify_fsurdat/fsurdat_modifier b/tools/modify_fsurdat/fsurdat_modifier index fdf3d48756..8c2031b548 100755 --- a/tools/modify_fsurdat/fsurdat_modifier +++ b/tools/modify_fsurdat/fsurdat_modifier @@ -37,6 +37,7 @@ ncar_pylib contains all the arguments needed by the script. 3) Run the script ./fsurdat_modifier pointing to the copied/modified .cfg file, e.g. 
modify_users_copy.cfg +4) Use the --verbose option to see progress output on your screen Example ------- diff --git a/tools/modify_fsurdat/modify_template.cfg b/tools/modify_fsurdat/modify_template.cfg index e5efa41ced..6b18cedc36 100644 --- a/tools/modify_fsurdat/modify_template.cfg +++ b/tools/modify_fsurdat/modify_template.cfg @@ -56,16 +56,13 @@ lnd_lon_2 = 360 # user-defined mask in a file, as alternative to setting lat/lon values landmask_file = UNSET -# Non-crop PFT to be set to 100% according to user-defined mask. -# If idealized = True and dom_nat_pft = UNSET, the latter defaults to 0 -# (bare soil). Valid values 0 to 14 (int). -dom_nat_pft = UNSET -# Crop (CFT) to be set to 100% according to user-defined mask. -# If dom_nat_pft >= 0, dom_cft defaults to UNSET. Valid values 15 to 78 (int). -dom_cft = UNSET +# PFT/CFT to be set to 100% according to user-defined mask. +# If idealized = True and dom_plant = UNSET, the latter defaults to 0 +# (bare soil). Valid values 0 to 78 (int). +dom_plant = UNSET -# LAI, SAI, HEIGHT_TOP, and HEIGHT_BOT values by month for dom_nat_pft -# If dom_nat_pft = 0, the next four default to 0 (space-delimited list +# LAI, SAI, HEIGHT_TOP, and HEIGHT_BOT values by month for dom_plant +# If dom_plant = 0, the next four default to 0 (space-delimited list # of floats without brackets). lai = UNSET sai = UNSET From 76b467812228a1768f1e45508a6ec157dc54b0c6 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 26 Jan 2022 14:26:48 -0700 Subject: [PATCH 120/223] first draft of multiple pfts --- .../site_and_regional/single_point_case.py | 195 ++++++++++++++++-- 1 file changed, 183 insertions(+), 12 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 6eac0c4390..91afbf156d 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -6,6 +6,7 @@ # -- Import Python Standard Libraries import logging import os +import argparse # -- 3rd party libraries import numpy as np @@ -17,6 +18,8 @@ logger = logging.getLogger(__name__) +NAT_PFT = 15 +MAX_PFT = 78 class SinglePointCase(BaseCase): """ @@ -46,6 +49,8 @@ class SinglePointCase(BaseCase): dominant pft type for this single point (None if not specified) num_pft : int total number of pfts for surface dataset (if crop 78 pft, else 16 pft) + pct_pft : int + weight or percentage of each pft. zero_nonveg_landunits : bool flag for setting all non-vegetation landunits to zero uni_snow : bool @@ -92,6 +97,7 @@ def __init__( create_datm, create_user_mods, dom_pft, + pct_pft, num_pft, include_nonveg, uni_snow, @@ -104,14 +110,17 @@ def __init__( self.plon = plon self.site_name = site_name self.dom_pft = dom_pft + self.pct_pft = pct_pft self.num_pft = num_pft self.include_nonveg = include_nonveg self.uni_snow = uni_snow self.cap_saturation = cap_saturation self.out_dir = out_dir - self.create_tag() + self.create_tag() self.check_dom_pft () + self.check_nonveg () + self.check_pct_pft () def create_tag(self): """ @@ -125,11 +134,146 @@ def create_tag(self): def check_dom_pft (self): """ - A function to compare dom_pft and num_pft + A function to sanity check values in dom_pft: + + - Compare dom_pft (values if more than one) with num_pft: + i.e. If dom_pft is 18 without crop it fails. + + - Check for mixed land-units: + If we have more than one dom_pft, they should be in the + same range. + e.g. 
If users specified multiple dom_pft, they should be + either in : + - 1-15 range + or + - 16-78 range + - give an error : mixed land units not possible. + + dom_pft in netcdf: 1-15 which tranlate to 0-14 + ------------- + Raises: + Error (ArgumentTypeError): + If any dom_pft is bigger than 78. + Error (ArgumentTypeError): + If any dom_pft is less than 1. + Error (ArgumentTypeError): + If mixed land units are chosen. + dom_pft values are both in range of 1-15 and 16-78. + + + """ + + if self.dom_pft is None: + logger.warning ("No dominant pft type is chosen. " + "If you want to choose a dominant pft type, please use --dompft flag.") + else: + min_dom_pft = min(self.dom_pft) + max_dom_pft = max(self.dom_pft) + + #-- check dom_pft vs num_pft + if max_dom_pft > self.num_pft : + err_msg = "Please use --crop flag when --dompft is above 16." + raise argparse.ArgumentTypeError(err_msg) + + #-- check dom_pft values should be between 1-78 + if min_dom_pft <1 or max_dom_pft >MAX_PFT: + err_msg = "values for --dompft should not be between 1 and 78." + raise argparse.ArgumentTypeError(err_msg) + + if min_dom_pft <=NAT_PFT and max_dom_pft >NAT_PFT: + err_msg = """ + \n + Subsetting using mixed land units is not possible. + Please make sure all --dompft values are in only + one of these ranges: + - 1-15 + - 16-78 + """ + raise argparse.ArgumentTypeError(err_msg) + + def check_nonveg (self): + """ + A function to check at least one of the following arguments is given: + --include-nonveg + --dompft DOMPFT + + Basically, this function raises an error + when zero out non veg land units (by default true) and not provide a dominant pft: + + The user can run ./subset_data using: + ./subset_data point --dom-pft + ./subset_data point --include-nonveg + ./subset_data point --dom-pft --include-nonveg + + But this will raise an error: + ./subset_data point + + By default include_nonveg = False, which means that it zeros out the non-veg landunits. + """ + + if not self.include_nonveg: + if self.dom_pft is None: + err_msg = """ + \n + By default, this will zero out non-veg land units. + To include non-veg land units, you need to specify --include-nonveg flag. + To zero-out non-veg land units, you need to specify --dompft. + + You should specify at least one of the following arguments: + --dompft DOMPFT + --include_nonveg + """ + raise argparse.ArgumentTypeError(err_msg) + + + def check_pct_pft (self): + """ + A function to error check pct_pft and calculate it if necessary. + + If the user gives dom_pft and pct_pft : + - Check if length of dom_pft and pct_pft matches. + For example, --dompft 8 --pctpft 0.4 0.6 should give an error. + + - Check if the sum of pct_pft is equal to 100% or 1. + For example, --dompft 8 15 --pctpft 0.6 0.9 should give an error. + + - If the sum of pct_pft is 1, convert it to % (multiply by 100) + + If the user gives one or more dom_pft but no pct_pft, assume equal pct_pft: + - pct_pft = 100 / number of given dom_pft + For example, if two dom_pft (s) are given, each of them is 50%. + """ - if self.dom_pft: - print (type(self.dom_pft)) + # -- if both dom_pft and pct_pft is given: + if self.dom_pft and self.pct_pft: + + # -- check if the same number of values are given + if len(self.dom_pft) != len(self.pct_pft): + err_msg = "Please provide the same number of values for --dompft and --pctpft." 
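+            # (Editor's note: worked examples of this function's checks,
+            #  using the CLI spellings from the docstring above:
+            #    --dompft 8      --pctpft 0.4 0.6  -> error: lengths differ
+            #    --dompft 8 15   --pctpft 0.6 0.9  -> error: sum not 1 or 100
+            #    --dompft 8 15   --pctpft 0.4 0.6  -> pct_pft = [40.0, 60.0]
+            #    --dompft 1 2 3 4 with no --pctpft -> pct_pft = [25.0] * 4 )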
+ raise argparse.ArgumentTypeError(err_msg) + + # -- check if the sum of pct_pft is equal to 1 or 100 + if sum(self.pct_pft)!= 1 and sum(self.pct_pft) != 100: + err_msg = "Sum of --pct_pft values should be equal to 1 or 100." + raise argparse.ArgumentTypeError(err_msg) + + # -- convert franction to percentage + if sum(self.pct_pft) == 1: + self.pct_pft = [pct * 100 for pct in self.pct_pft] + + # -- if the user did not give --pctpft at all (assume equal percentage) + elif self.dom_pft: + pct = 100/len(self.dom_pft) + self.pct_pft = [pct for pft in self.dom_pft] + + # -- if the user only gave --pctpft with no --dompft + elif self.pct_pft: + err_msg = " --pctpft is specfied without --dompft. Please specify your dominant pft by --dompft." + raise argparse.ArgumentTypeError (err_msg) + + logger.info (" - dominant pft(s) : %s",self.dom_pft) + logger.info (" - percentage of dominant pft(s) : %s",self.pct_pft) def create_domain_at_point(self, indir, file): """ @@ -237,19 +381,46 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): # expand dimensions f_out = f_out.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) - - # modify surface data properties + #-- modify surface data properties if self.dom_pft is not None: - f_out["PCT_NAT_PFT"][:, :, :] = 0 - if self.dom_pft < self.num_pft: - f_out['PCT_NAT_PFT'][:, :, self.dom_pft] = 100 + #-- First initialize everything: + f_out["PCT_NAT_PFT"][:, :, 0] = 100 + #f_out["PCT_NATVEG"][:, :] = 0 + + f_out["PCT_CFT"][:, :, 0] = 100 + #f_out["PCT_CROP"][:, :] = 0 + + #-- loop over all dom_pft and pct_pft + zip_pfts = zip (self.dom_pft, self.pct_pft) + for dom_pft, pct_pft in zip_pfts: + if dom_pft <= NAT_PFT: + f_out['PCT_NAT_PFT'][:, :, dom_pft-1] = pct_pft + elif dom_pft > NAT_PFT: + dom_pft = dom_pft-NAT_PFT + f_out['PCT_CFT'][:, :, dom_pft-1] = pct_pft + + # ------------------------------- + # By default include_nonveg=False + # When we use --include-nonveg we turn it to True + if not self.include_nonveg: - f_out["PCT_NATVEG"][:, :] = 100 - f_out["PCT_CROP"][:, :] = 0 + logger.info ("Zeroing out non-vegetation land units in the surface data.") f_out["PCT_LAKE"][:, :] = 0.0 f_out["PCT_WETLAND"][:, :] = 0.0 - f_out["PCT_URBAN"][:, :, ] = 0.0 + f_out["PCT_URBAN"][:, :] = 0.0 f_out["PCT_GLACIER"][:, :] = 0.0 + + max_dom_pft = max(self.dom_pft) + if max_dom_pft <=NAT_PFT : + f_out["PCT_NATVEG"][:, :] = 100 + f_out["PCT_CROP"][:, :] = 0 + else: + f_out["PCT_NATVEG"][:, :] = 0 + f_out["PCT_CROP"][:, :] = 100 + + else: + logger.info ("You chose --include-nonveg --> Do not zero non-vegetation land units in the surface data.") + if self.uni_snow: f_out["STD_ELEV"][:, :] = 20.0 if self.cap_saturation: From 09b1d704b03237e58603a818d81d40e73c1a3d1e Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 26 Jan 2022 14:27:39 -0700 Subject: [PATCH 121/223] moving the error location. --- python/ctsm/site_and_regional/single_point_case.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 91afbf156d..c158c62369 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -170,16 +170,16 @@ def check_dom_pft (self): min_dom_pft = min(self.dom_pft) max_dom_pft = max(self.dom_pft) - #-- check dom_pft vs num_pft - if max_dom_pft > self.num_pft : - err_msg = "Please use --crop flag when --dompft is above 16." 
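        # (Editor's note on this reorder: the generic 1-78 range check now
        #  runs before the num_pft comparison, so an out-of-range value such
        #  as --dompft 79 fails with the range message rather than the
        #  misleading "use --crop" one. Resulting order of the checks:
        #    1. 1 <= dom_pft <= MAX_PFT (78)
        #    2. dom_pft <= num_pft (16 unless --crop)
        #    3. no mixing across NAT_PFT (15))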
- raise argparse.ArgumentTypeError(err_msg) - #-- check dom_pft values should be between 1-78 if min_dom_pft <1 or max_dom_pft >MAX_PFT: err_msg = "values for --dompft should not be between 1 and 78." raise argparse.ArgumentTypeError(err_msg) + #-- check dom_pft vs num_pft + if max_dom_pft > self.num_pft : + err_msg = "Please use --crop flag when --dompft is above 16." + raise argparse.ArgumentTypeError(err_msg) + if min_dom_pft <=NAT_PFT and max_dom_pft >NAT_PFT: err_msg = """ \n From 7368664328625f13636d9652efae3aca607b3eca Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 26 Jan 2022 14:36:37 -0700 Subject: [PATCH 122/223] some modifications. --- python/ctsm/site_and_regional/single_point_case.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index c158c62369..642b8b9f0b 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -47,10 +47,10 @@ class SinglePointCase(BaseCase): flag for creating user mods directories and files dom_pft : int dominant pft type for this single point (None if not specified) - num_pft : int - total number of pfts for surface dataset (if crop 78 pft, else 16 pft) - pct_pft : int + pct_pft : list weight or percentage of each pft. + num_pft : list + total number of pfts for surface dataset (if crop 78 pft, else 16 pft) zero_nonveg_landunits : bool flag for setting all non-vegetation landunits to zero uni_snow : bool @@ -172,7 +172,7 @@ def check_dom_pft (self): #-- check dom_pft values should be between 1-78 if min_dom_pft <1 or max_dom_pft >MAX_PFT: - err_msg = "values for --dompft should not be between 1 and 78." + err_msg = "values for --dompft should be between 1 and 78." raise argparse.ArgumentTypeError(err_msg) #-- check dom_pft vs num_pft @@ -250,12 +250,12 @@ def check_pct_pft (self): # -- check if the same number of values are given if len(self.dom_pft) != len(self.pct_pft): - err_msg = "Please provide the same number of values for --dompft and --pctpft." + err_msg = "Please provide the same number of inputs for --dompft and --pctpft." raise argparse.ArgumentTypeError(err_msg) # -- check if the sum of pct_pft is equal to 1 or 100 if sum(self.pct_pft)!= 1 and sum(self.pct_pft) != 100: - err_msg = "Sum of --pct_pft values should be equal to 1 or 100." + err_msg = "Sum of --pctpft values should be equal to 1 or 100." raise argparse.ArgumentTypeError(err_msg) # -- convert franction to percentage From e3e511c844c0f56f1db5511e5d6eb9053599f2da Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 26 Jan 2022 15:12:46 -0700 Subject: [PATCH 123/223] adding tests for args_utils.py --- python/ctsm/args_utils.py | 2 +- python/ctsm/test/test_unit_args_utils.py | 60 ++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 python/ctsm/test/test_unit_args_utils.py diff --git a/python/ctsm/args_utils.py b/python/ctsm/args_utils.py index 98faf068f6..b96f6d1468 100644 --- a/python/ctsm/args_utils.py +++ b/python/ctsm/args_utils.py @@ -47,7 +47,7 @@ def plon_type(plon): plon_out (float): converted longitude between 0 and 360 """ plon = float(plon) - if plon < 0 or plon > 360: + if plon < -180 or plon > 360: raise argparse.ArgumentTypeError( "ERROR: Longitude should be between 0 and 360 or -180 and 180." 
) diff --git a/python/ctsm/test/test_unit_args_utils.py b/python/ctsm/test/test_unit_args_utils.py new file mode 100644 index 0000000000..f0f3de6d6f --- /dev/null +++ b/python/ctsm/test/test_unit_args_utils.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 + +import os +import sys +import unittest +import argparse + +# -- add python/ctsm to path +_CTSM_PYTHON = os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) +print (_CTSM_PYTHON) +sys.path.insert(1, _CTSM_PYTHON) + +from ctsm.args_utils import plon_type, plat_type +from ctsm import unit_testing + +# pylint: disable=invalid-name + +class TestArgsPlon(unittest.TestCase): + + # --between 0-360 + def test_plonType_positive(self): + result = plon_type(30) + self.assertEqual(result, 30.0) + + # --between -180-0 + def test_plonType_negative(self): + result = plon_type(-30) + self.assertEqual(result, 330.0) + + # -- > 360 + def test_plonType_outOfBounds_positive(self): + with self.assertRaisesRegex( + argparse.ArgumentTypeError, "Longitude.*should be between" + ): + _ = plon_type(360.5) + + # -- < -180 + def test_plonType_outOfBounds_negative(self): + with self.assertRaisesRegex( + argparse.ArgumentTypeError, "Longitude.*should be between" + ): + _ = plon_type(-200) + +class TestArgsPlat(unittest.TestCase): + def test_platType_outOfBounds_positive(self): + with self.assertRaisesRegex( + argparse.ArgumentTypeError, "Latitude.*should be between" + ): + _ = plat_type(91) + + def test_platType_outOfBounds_negative(self): + with self.assertRaisesRegex( + argparse.ArgumentTypeError, "Latitude.*should be between" + ): + _ = plat_type(-91) + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() From 37ff41934f1a42625f11923e01b8c2aeaa5ab113 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 26 Jan 2022 15:38:47 -0700 Subject: [PATCH 124/223] Updates for unit/sys tests to pass --- python/ctsm/test/test_sys_fsurdat_modifier.py | 4 ++-- python/ctsm/test/test_unit_modify_fsurdat.py | 21 ++++++++++++------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/python/ctsm/test/test_sys_fsurdat_modifier.py b/python/ctsm/test/test_sys_fsurdat_modifier.py index 7d2819261c..4f51ee68b5 100755 --- a/python/ctsm/test/test_sys_fsurdat_modifier.py +++ b/python/ctsm/test/test_sys_fsurdat_modifier.py @@ -116,8 +116,8 @@ def _create_config_file_complete(self): line = 'lnd_lon_1 = 295\n' elif re.match(r' *lnd_lon_2 *=', line): line = 'lnd_lon_2 = 300\n' - elif re.match(r' *dom_nat_pft *=', line): - line = 'dom_nat_pft = 1' + elif re.match(r' *dom_plant *=', line): + line = 'dom_plant = 1' elif re.match(r' *lai *=', line): line = 'lai = 0 1 2 3 4 5 5 4 3 2 1 0\n' elif re.match(r' *sai *=', line): diff --git a/python/ctsm/test/test_unit_modify_fsurdat.py b/python/ctsm/test/test_unit_modify_fsurdat.py index 19c53dac6a..33fe459fe6 100755 --- a/python/ctsm/test/test_unit_modify_fsurdat.py +++ b/python/ctsm/test/test_unit_modify_fsurdat.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """ -Unit tests for _get_not_rectangle +Unit tests for _get_rectangle """ import unittest @@ -21,7 +21,7 @@ class TestModifyFsurdat(unittest.TestCase): """Tests the setvar_lev functions and the - _get_not_rectangle function + _get_rectangle function """ def test_setvarLev(self): @@ -103,9 +103,10 @@ def test_getNotRectangle_lon1leLon2Lat1leLat2(self): lon_2 = 5 # lon_1 < lon_2 lat_1 = 6 lat_2 = 8 # lat_1 < lat_2 - not_rectangle = ModifyFsurdat._get_not_rectangle( + rectangle = ModifyFsurdat._get_rectangle( lon_1=lon_1, lon_2=lon_2, 
lat_1=lat_1, lat_2=lat_2, longxy=longxy, latixy=latixy) + not_rectangle = np.logical_not(rectangle) compare = np.ones((rows,cols)) # assert this to confirm intuitive understanding of these matrices self.assertEqual(np.size(not_rectangle), np.size(compare)) @@ -140,9 +141,10 @@ def test_getNotRectangle_lon1leLon2Lat1gtLat2(self): lon_2 = 4 # lon_1 < lon_2 lat_1 = 4 lat_2 = 0 # lat_1 > lat_2 - not_rectangle = ModifyFsurdat._get_not_rectangle( + rectangle = ModifyFsurdat._get_rectangle( lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, longxy=longxy, latixy=latixy) + not_rectangle = np.logical_not(rectangle) compare = np.ones((rows,cols)) # assert this to confirm intuitive understanding of these matrices self.assertEqual(np.size(not_rectangle), np.size(compare)) @@ -178,9 +180,10 @@ def test_getNotRectangle_lon1gtLon2Lat1leLat2(self): lon_2 = 2 # lon_1 > lon_2 lat_1 = 2 lat_2 = 3 # lat_1 < lat_2 - not_rectangle = ModifyFsurdat._get_not_rectangle( + rectangle = ModifyFsurdat._get_rectangle( lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, longxy=longxy, latixy=latixy) + not_rectangle = np.logical_not(rectangle) compare = np.ones((rows,cols)) # assert this to confirm intuitive understanding of these matrices self.assertEqual(np.size(not_rectangle), np.size(compare)) @@ -216,9 +219,10 @@ def test_getNotRectangle_lon1gtLon2Lat1gtLat2(self): lon_2 = -6 # lon_1 > lon_2 lat_1 = 0 lat_2 = -3 # lat_1 > lat_2 - not_rectangle = ModifyFsurdat._get_not_rectangle( + rectangle = ModifyFsurdat._get_rectangle( lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, longxy=longxy, latixy=latixy) + not_rectangle = np.logical_not(rectangle) compare = np.ones((rows,cols)) # assert this to confirm intuitive understanding of these matrices self.assertEqual(np.size(not_rectangle), np.size(compare)) @@ -256,9 +260,10 @@ def test_getNotRectangle_lonsStraddle0deg(self): lon_2 = 5 # lon_1 > lon_2 lat_1 = -4 lat_2 = -6 # lat_1 > lat_2 - not_rectangle = ModifyFsurdat._get_not_rectangle( + rectangle = ModifyFsurdat._get_rectangle( lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, longxy=longxy, latixy=latixy) + not_rectangle = np.logical_not(rectangle) compare = np.ones((rows,cols)) # assert this to confirm intuitive understanding of these matrices self.assertEqual(np.size(not_rectangle), np.size(compare)) @@ -294,7 +299,7 @@ def test_getNotRectangle_latsOutOfBounds(self): lat_2 = 91 with self.assertRaisesRegex(SystemExit, "lat_1 and lat_2 need to be in the range -90 to 90"): - _ = ModifyFsurdat._get_not_rectangle( + _ = ModifyFsurdat._get_rectangle( lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, longxy=longxy, latixy=latixy) From 06106ee0a8415a060d1e40adadc35b516c4d5832 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 26 Jan 2022 16:07:22 -0700 Subject: [PATCH 125/223] Updates based on pylint or black recommendations --- python/ctsm/modify_fsurdat/modify_fsurdat.py | 1 - python/ctsm/test/test_sys_fsurdat_modifier.py | 16 ++++++++-------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/python/ctsm/modify_fsurdat/modify_fsurdat.py b/python/ctsm/modify_fsurdat/modify_fsurdat.py index 4197fb5aaf..9c5d71af9e 100644 --- a/python/ctsm/modify_fsurdat/modify_fsurdat.py +++ b/python/ctsm/modify_fsurdat/modify_fsurdat.py @@ -85,7 +85,6 @@ def _get_rectangle(lon_1, lon_2, lat_1, lat_2, longxy, latixy): # union rectangles overlap rectangle = np.logical_and(union_1, union_2) - not_rectangle = np.logical_not(rectangle) return rectangle diff --git a/python/ctsm/test/test_sys_fsurdat_modifier.py 
b/python/ctsm/test/test_sys_fsurdat_modifier.py index 4f51ee68b5..c7a6f380f5 100755 --- a/python/ctsm/test/test_sys_fsurdat_modifier.py +++ b/python/ctsm/test/test_sys_fsurdat_modifier.py @@ -87,25 +87,25 @@ def test_allInfo(self): def _create_config_file_minimal(self): - with open (self._cfg_file_path,'w') as cfg_out: - with open (self._cfg_template_path,'r') as cfg_in: + with open (self._cfg_file_path, 'w', encoding='utf-8') as cfg_out: + with open (self._cfg_template_path, 'r', encoding='utf-8') as cfg_in: for line in cfg_in: if re.match(r' *fsurdat_in *=', line): - line = 'fsurdat_in = {}'.format(self._fsurdat_in) + line = f'fsurdat_in = {self._fsurdat_in}' elif re.match(r' *fsurdat_out *=', line): - line = 'fsurdat_out = {}'.format(self._fsurdat_out) + line = f'fsurdat_out = {self._fsurdat_out}' cfg_out.write(line) def _create_config_file_complete(self): - with open (self._cfg_file_path,'w') as cfg_out: - with open (self._cfg_template_path,'r') as cfg_in: + with open (self._cfg_file_path, 'w', encoding='utf-8') as cfg_out: + with open (self._cfg_template_path, 'r', encoding='utf-8') as cfg_in: for line in cfg_in: if re.match(r' *fsurdat_in *=', line): - line = 'fsurdat_in = {}'.format(self._fsurdat_in) + line = f'fsurdat_in = {self._fsurdat_in}' elif re.match(r' *fsurdat_out *=', line): - line = 'fsurdat_out = {}'.format(self._fsurdat_out) + line = f'fsurdat_out = {self._fsurdat_out}' elif re.match(r' *idealized *=', line): line = 'idealized = True' elif re.match(r' *lnd_lat_1 *=', line): From 7332c33ed84e28e2776102a2a9605bd566fbf4cd Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 26 Jan 2022 16:14:51 -0700 Subject: [PATCH 126/223] adding tests + pylint cleanups. --- python/ctsm/args_utils.py | 8 +-- python/ctsm/config_utils.py | 19 ++++-- python/ctsm/subset_data.py | 3 +- python/ctsm/test/test_unit_args_utils.py | 34 ++++++++++- python/ctsm/test/test_unit_utils.py | 6 +- python/ctsm/utils.py | 77 +----------------------- 6 files changed, 56 insertions(+), 91 deletions(-) diff --git a/python/ctsm/args_utils.py b/python/ctsm/args_utils.py index b96f6d1468..1d8ee8d2fc 100644 --- a/python/ctsm/args_utils.py +++ b/python/ctsm/args_utils.py @@ -1,6 +1,7 @@ """ General-purpose utilities for handling command-line arguments and flags in ctsm python codes. +Types for command-lines error handling. """ import logging @@ -8,9 +9,7 @@ from ctsm.config_utils import lon_range_0_to_360 - -# Types for command-lines error handling: - +logger = logging.getLogger(__name__) def plat_type(plat): """ @@ -26,13 +25,12 @@ def plat_type(plat): plat_out (float): latitude in float """ plat_out = float(plat) - if (plat_out < -90) or (plat_out > 90): + if plat_out < -90 or plat_out > 90: raise argparse.ArgumentTypeError( "ERROR: Latitude should be between -90 and 90." 
) return plat_out - def plon_type(plon): """ Function to define lon type for the parser and diff --git a/python/ctsm/config_utils.py b/python/ctsm/config_utils.py index 7729f2b060..41a023f375 100644 --- a/python/ctsm/config_utils.py +++ b/python/ctsm/config_utils.py @@ -4,11 +4,20 @@ """ import logging +import configparser from ctsm.utils import abort logger = logging.getLogger(__name__) +# This string is used in the out-of-the-box ctsm.cfg and modify.cfg files +# to denote a value that needs to be filled in +_CONFIG_PLACEHOLDER = "FILL_THIS_IN" +# This string is used in the out-of-the-box ctsm.cfg and modify.cfg files +# to denote a value that can be filled in, but doesn't absolutely need to be +_CONFIG_UNSET = "UNSET" + + def lon_range_0_to_360(lon_in): """ Description @@ -53,11 +62,11 @@ def get_config_value( """ try: val = config.get(section, item) - except NoSectionError: + except configparser.NoSectionError: abort( "ERROR: Config file {} must contain section '{}'".format(file_path, section) ) - except NoOptionError: + except configparser.NoOptionError: abort( "ERROR: Config file {} must contain item '{}' in section '{}'".format( file_path, item, section @@ -78,6 +87,7 @@ def get_config_value( can_be_unset=can_be_unset, allowed_values=allowed_values, ) + return val def _handle_config_value( @@ -129,6 +139,7 @@ def _handle_config_value( return var + def _convert_to_bool(var): """ Function for converting different forms of @@ -146,12 +157,10 @@ def _convert_to_bool(var): var_out (bool): Boolean value corresponding to the input. """ if var.lower() in ("yes", "true", "t", "y", "1"): - var_out = True + var_out = True elif var.lower() in ("no", "false", "f", "n", "0"): var_out = False else: raise ValueError("Boolean value expected. [true or false] or [y or n]") return var_out - - diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index c00ad40ccf..efc4d227b8 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -152,7 +152,8 @@ def get_parser(): ) pt_parser.add_argument( "--dompft", - help="Dominant PFT(s): if we set the grid to 100%% one or multiple PFTs [default: %(default)s].", + help="Dominant PFT(s): if we set the grid to 100%% one or multiple PFTs \ + [default: %(default)s].", action="store", dest="dom_pft", type=int, diff --git a/python/ctsm/test/test_unit_args_utils.py b/python/ctsm/test/test_unit_args_utils.py index f0f3de6d6f..e8a29e189f 100644 --- a/python/ctsm/test/test_unit_args_utils.py +++ b/python/ctsm/test/test_unit_args_utils.py @@ -1,35 +1,53 @@ #!/usr/bin/env python3 +""" +Unit tests for arg_utils.py function and types. 
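+Editor's note, summarizing the behaviour these tests pin down: plon_type
+wraps longitudes given in [-180, 0) into [180, 360) -- e.g. plon_type(-30)
+returns 330.0 -- and both types raise argparse.ArgumentTypeError outside
+their valid ranges ([-180, 360] for longitude, [-90, 90] for latitude).
+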
+ +You can run this by: + python -m unittest test_unit_args_utils.py +""" import os import sys import unittest import argparse -# -- add python/ctsm to path +# -- add python/ctsm to path (needed if we want to run the test stand-alone) _CTSM_PYTHON = os.path.join( os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) -print (_CTSM_PYTHON) sys.path.insert(1, _CTSM_PYTHON) +#pylint: disable=wrong-import-position from ctsm.args_utils import plon_type, plat_type from ctsm import unit_testing # pylint: disable=invalid-name class TestArgsPlon(unittest.TestCase): + """ + Tests for plot_type in args_util.py + """ # --between 0-360 def test_plonType_positive(self): + """ + Test of positive plon between 0 and 360 + """ result = plon_type(30) self.assertEqual(result, 30.0) # --between -180-0 def test_plonType_negative(self): + """ + Test of negative plon between -180 and 0 + """ result = plon_type(-30) self.assertEqual(result, 330.0) # -- > 360 def test_plonType_outOfBounds_positive(self): + """ + Test of plon values greater than 360 + """ with self.assertRaisesRegex( argparse.ArgumentTypeError, "Longitude.*should be between" ): @@ -37,19 +55,31 @@ def test_plonType_outOfBounds_positive(self): # -- < -180 def test_plonType_outOfBounds_negative(self): + """ + Test of plon values smaller than -180 + """ with self.assertRaisesRegex( argparse.ArgumentTypeError, "Longitude.*should be between" ): _ = plon_type(-200) class TestArgsPlat(unittest.TestCase): + """ + Tests for plat_type in args_util.py + """ def test_platType_outOfBounds_positive(self): + """ + Test of plat_type bigger than 90 + """ with self.assertRaisesRegex( argparse.ArgumentTypeError, "Latitude.*should be between" ): _ = plat_type(91) def test_platType_outOfBounds_negative(self): + """ + Test of plat_type smaller than -90 + """ with self.assertRaisesRegex( argparse.ArgumentTypeError, "Latitude.*should be between" ): diff --git a/python/ctsm/test/test_unit_utils.py b/python/ctsm/test/test_unit_utils.py index ead0d8ce5a..cad2a7d1af 100755 --- a/python/ctsm/test/test_unit_utils.py +++ b/python/ctsm/test/test_unit_utils.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -"""Unit tests for utils +"""Unit tests for utils and config_utils """ import tempfile @@ -9,8 +9,8 @@ import os from ctsm import unit_testing -from ctsm.utils import (fill_template_file, lon_range_0_to_360, - _handle_config_value) +from ctsm.utils import fill_template_file +from ctsm.config_utils import lon_range_0_to_360, _handle_config_value # Allow names that pylint doesn't like, because otherwise I find it hard # to make readable unit test names diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index ac9d567761..70bef1d951 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -5,23 +5,12 @@ import sys import string import pdb -import subprocess from datetime import date from getpass import getuser -from configparser import NoSectionError, NoOptionError -from ctsm.path_utils import path_to_ctsm_root logger = logging.getLogger(__name__) -# This string is used in the out-of-the-box ctsm.cfg and modify.cfg files -# to denote a value that needs to be filled in -_CONFIG_PLACEHOLDER = "FILL_THIS_IN" -# This string is used in the out-of-the-box ctsm.cfg and modify.cfg files -# to denote a value that can be filled in, but doesn't absolutely need to be -_CONFIG_UNSET = "UNSET" - - def abort(errmsg): """Abort the program with the given error message @@ -76,8 +65,8 @@ def add_tag_to_filename(filename, tag): if basename[cend] == "c": cend = cend - 1 if 
(basename[cend] != ".") and (basename[cend] != "_"): - logger.error("Trouble figuring out where to add tag to filename:" , filename) - abort() + err_msg = "Trouble figuring out where to add tag to filename: " + filename + abort(err_msg) today = date.today() today_string = today.strftime("%y%m%d") fname_out = basename[:cend] + "_" + tag + "_c" + today_string + ".nc" @@ -134,65 +123,3 @@ def update_metadata(file, title, summary, contact, data_script, description): for attr in del_attrs: if attr in attr_list: del file.attrs[attr] - - -def _handle_config_value( - var, default, item, is_list, convert_to_type, can_be_unset, allowed_values -): - """ - Description - ----------- - Assign the default value or the user-specified one to var. - Convert from default type (str) to reqested type (int or float). - - If is_list is True, then default should be a list - """ - if var == _CONFIG_UNSET: - if can_be_unset: - return default # default may be None - abort("Must set a value for .cfg file variable: {}".format(item)) - - # convert string to list of strings; if there is just one element, - # we will get a list of size one, which we will convert back to a - # scalar later if needed - var = var.split() - - if convert_to_type is bool: - try: - var = [_convert_to_bool(v) for v in var] - except ValueError: - abort("Non-boolean value found for .cfg file variable: {}".format(item)) - elif convert_to_type is not None: - try: - var = [convert_to_type(v) for v in var] - except ValueError: - abort("Wrong type for .cfg file variable: {}".format(item)) - - if allowed_values is not None: - for val in var: - if val not in allowed_values: - print("val = ", val, " in var not in allowed_values") - errmsg = ( - "{} is not an allowed value for {} in .cfg file. " - "Check allowed_values".format(val, item) - ) - abort(errmsg) - - if not is_list: - if len(var) > 1: - abort("More than 1 element found for .cfg file variable: {}".format(item)) - var = var[0] - - return var - - -def _convert_to_bool(val): - """Convert the given value to boolean - - Conversion is as in config files 'getboolean' - """ - if val.lower() in ["1", "yes", "true", "on"]: - return True - if val.lower() in ["0", "no", "false", "off"]: - return False - raise ValueError("{} cannot be converted to boolean".format(val)) From ba95375d3dad5b304daf592a9ee114b1d24e7db2 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 26 Jan 2022 16:33:46 -0700 Subject: [PATCH 127/223] cleaning up pylint. --- python/ctsm/site_and_regional/base_case.py | 1 - python/ctsm/site_and_regional/single_point_case.py | 12 +++++++++--- python/ctsm/subset_data.py | 2 -- python/ctsm/test/test_unit_args_utils.py | 0 4 files changed, 9 insertions(+), 6 deletions(-) mode change 100644 => 100755 python/ctsm/test/test_unit_args_utils.py diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 27637a0a26..b858b27176 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -6,7 +6,6 @@ # -- Import libraries # -- standard libraries -import os import logging from collections import namedtuple diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 642b8b9f0b..45b62462a2 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -180,7 +180,8 @@ def check_dom_pft (self): err_msg = "Please use --crop flag when --dompft is above 16." 
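        # (Editor's note on the change just below: the chained comparison
        #  "min_dom_pft <= NAT_PFT < max_dom_pft" is pylint's preferred
        #  spelling of the old two-clause test and is logically identical;
        #  e.g. --dompft 14 16 trips it (mixed ranges), --dompft 16 78 does not.)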
raise argparse.ArgumentTypeError(err_msg) - if min_dom_pft <=NAT_PFT and max_dom_pft >NAT_PFT: + #-- check if all dom_pft are in the same range: + if min_dom_pft <= NAT_PFT < max_dom_pft: err_msg = """ \n Subsetting using mixed land units is not possible. @@ -269,7 +270,11 @@ def check_pct_pft (self): # -- if the user only gave --pctpft with no --dompft elif self.pct_pft: - err_msg = " --pctpft is specfied without --dompft. Please specify your dominant pft by --dompft." + err_msg = """ + \n + --pctpft is specfied without --dompft. + Please specify your dominant pft by --dompft. + """ raise argparse.ArgumentTypeError (err_msg) logger.info (" - dominant pft(s) : %s",self.dom_pft) @@ -419,7 +424,8 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): f_out["PCT_CROP"][:, :] = 100 else: - logger.info ("You chose --include-nonveg --> Do not zero non-vegetation land units in the surface data.") + logger.info ("You chose --include-nonveg --> \ + Do not zero non-vegetation land units in the surface data.") if self.uni_snow: f_out["STD_ELEV"][:, :] = 20.0 diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index efc4d227b8..e59229ac9d 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -53,7 +53,6 @@ # -- standard libraries import os -import sys import logging import argparse import textwrap @@ -68,7 +67,6 @@ from ctsm.site_and_regional.regional_case import RegionalCase from ctsm.args_utils import plon_type, plat_type from ctsm.path_utils import path_to_ctsm_root -from ctsm.utils import abort # -- import ctsm logging flags from ctsm.ctsm_logging import ( diff --git a/python/ctsm/test/test_unit_args_utils.py b/python/ctsm/test/test_unit_args_utils.py old mode 100644 new mode 100755 From 91eaf58470dee4e1102d4c836761b1eb3c408375 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 26 Jan 2022 16:48:59 -0700 Subject: [PATCH 128/223] New sys test for dom_plant set to a crop in the .cfg file --- python/ctsm/test/test_sys_fsurdat_modifier.py | 56 +++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/python/ctsm/test/test_sys_fsurdat_modifier.py b/python/ctsm/test/test_sys_fsurdat_modifier.py index c7a6f380f5..8603769776 100755 --- a/python/ctsm/test/test_sys_fsurdat_modifier.py +++ b/python/ctsm/test/test_sys_fsurdat_modifier.py @@ -59,6 +59,32 @@ def test_minimalInfo(self): self.assertTrue(fsurdat_out_data.equals(fsurdat_in_data)) + def test_crop(self): + """ + This version replances the vegetation with a crop + """ + + self._create_config_file_crop() + + # run the fsurdat_modifier tool + fsurdat_modifier(self._cfg_file_path) + # the critical piece of this test is that the above command + # doesn't generate errors; however, we also do some assertions below + + # compare fsurdat_out to fsurdat_in + fsurdat_in_data = xr.open_dataset(self._fsurdat_in) + fsurdat_out_data = xr.open_dataset(self._fsurdat_out) + # assert that fsurdat_out does not equal fsurdat_in + self.assertFalse(fsurdat_out_data.equals(fsurdat_in_data)) + + # compare fsurdat_out to fsurdat_out_baseline + fsurdat_out_baseline = self._fsurdat_in[:-3] + '_modified_with_crop' + \ + self._fsurdat_in[-3:] + fsurdat_out_base_data = xr.open_dataset(fsurdat_out_baseline) + # assert that fsurdat_out equals fsurdat_out_baseline + self.assertTrue(fsurdat_out_data.equals(fsurdat_out_base_data)) + + def test_allInfo(self): """ This version specifies all possible information @@ -97,6 +123,36 @@ def _create_config_file_minimal(self): cfg_out.write(line) + def _create_config_file_crop(self): 
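+        """
+        Editor's sketch of a docstring for this helper, inferred from the
+        body below: copy the template .cfg, point fsurdat_in/out at the test
+        files, and set dom_plant = 15 -- the first crop CFT in this 16-pft
+        dataset -- over a small Amazon rectangle, so that test_crop
+        exercises the PCT_CROP/PCT_CFT branch of set_dom_plant.
+        """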
+ + with open (self._cfg_file_path, 'w', encoding='utf-8') as cfg_out: + with open (self._cfg_template_path, 'r', encoding='utf-8') as cfg_in: + for line in cfg_in: + if re.match(r' *fsurdat_in *=', line): + line = f'fsurdat_in = {self._fsurdat_in}' + elif re.match(r' *fsurdat_out *=', line): + line = f'fsurdat_out = {self._fsurdat_out}' + elif re.match(r' *lnd_lat_1 *=', line): + line = 'lnd_lat_1 = -10\n' + elif re.match(r' *lnd_lat_2 *=', line): + line = 'lnd_lat_2 = -7\n' + elif re.match(r' *lnd_lon_1 *=', line): + line = 'lnd_lon_1 = 295\n' + elif re.match(r' *lnd_lon_2 *=', line): + line = 'lnd_lon_2 = 300\n' + elif re.match(r' *dom_plant *=', line): + line = 'dom_plant = 15' + elif re.match(r' *lai *=', line): + line = 'lai = 0 1 2 3 4 5 5 4 3 2 1 0\n' + elif re.match(r' *sai *=', line): + line = 'sai = 1 1 1 1 1 1 1 1 1 1 1 1\n' + elif re.match(r' *hgt_top *=', line): + line = 'hgt_top = 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5\n' + elif re.match(r' *hgt_bot *=', line): + line = 'hgt_bot = 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1\n' + cfg_out.write(line) + + def _create_config_file_complete(self): with open (self._cfg_file_path, 'w', encoding='utf-8') as cfg_out: From 4a66bf3eace94c931178778ffe5d5f7249be317b Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 26 Jan 2022 17:01:01 -0700 Subject: [PATCH 129/223] Committing fsurdat_out_baseline file used by new sys test --- ..._16pfts_Irrig_CMIP6_simyr2000_c171214_modified_with_crop.nc | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 python/ctsm/test/testinputs/surfdata_5x5_amazon_16pfts_Irrig_CMIP6_simyr2000_c171214_modified_with_crop.nc diff --git a/python/ctsm/test/testinputs/surfdata_5x5_amazon_16pfts_Irrig_CMIP6_simyr2000_c171214_modified_with_crop.nc b/python/ctsm/test/testinputs/surfdata_5x5_amazon_16pfts_Irrig_CMIP6_simyr2000_c171214_modified_with_crop.nc new file mode 100644 index 0000000000..69f28b2239 --- /dev/null +++ b/python/ctsm/test/testinputs/surfdata_5x5_amazon_16pfts_Irrig_CMIP6_simyr2000_c171214_modified_with_crop.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0217926e5dea2f563a01ad7149be68cf6d0acb0a140715a5402fdf39a925b3e7 +size 247880 From fc88c13bcbf5f1cc34345b38dd618310cb826f88 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 26 Jan 2022 17:39:30 -0700 Subject: [PATCH 130/223] single_point_case.py tests. --- python/ctsm/test/test_unit_singlept_data.py | 238 ++++++++++++++++++++ 1 file changed, 238 insertions(+) create mode 100644 python/ctsm/test/test_unit_singlept_data.py diff --git a/python/ctsm/test/test_unit_singlept_data.py b/python/ctsm/test/test_unit_singlept_data.py new file mode 100644 index 0000000000..1ef34fc483 --- /dev/null +++ b/python/ctsm/test/test_unit_singlept_data.py @@ -0,0 +1,238 @@ +#!/usr/bin/env python3 +""" +Unit tests for SinglePointCase + +You can run this by: + python -m unittest test_unit_singlept_data.py +""" + +import unittest +import argparse +import os +import sys + +# -- add python/ctsm to path (needed if we want to run the test stand-alone) +_CTSM_PYTHON = os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) +sys.path.insert(1, _CTSM_PYTHON) + +#pylint: disable=wrong-import-position +from ctsm import unit_testing +from ctsm.site_and_regional.single_point_case import SinglePointCase + +# pylint: disable=invalid-name + + +class TestSinglePointCase(unittest.TestCase): + """ + Basic class for testing SinglePointCase class in single_point_case.py. 
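+
+    Editor's note on the pattern below: each test builds a SinglePointCase
+    from the shared class-level defaults, then overrides only the attribute
+    under test (e.g. dom_pft) before calling the check being verified.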
+ """ + + plat = 20.1 + plon = 50.5 + site_name=None + create_domain=True + create_surfdata=True + create_landuse=True + create_datm=True + create_user_mods=True + dom_pft = [8] + pct_pft = None + num_pft = 16 + include_nonveg = False + uni_snow = True + cap_saturation = True + out_dir=os.getcwd() + + + def test_create_tag_noname(self): + """ + Test create_tag when site_name is NOT given. + """ + single_point = SinglePointCase( + plat = self.plat, + plon = self.plon, + site_name = self.site_name, + create_domain = self.create_domain, + create_surfdata = self.create_surfdata, + create_landuse = self.create_landuse, + create_datm = self.create_datm, + create_user_mods = self.create_user_mods, + dom_pft = self.dom_pft, + pct_pft = self.pct_pft, + num_pft = self.num_pft, + include_nonveg = self.include_nonveg, + uni_snow = self.uni_snow, + cap_saturation = self.cap_saturation, + out_dir = self.out_dir, + ) + + single_point.create_tag() + self.assertEqual(single_point.tag,"50.5_20.1" ) + + def test_create_tag_name(self): + """ + Test create_tag when site_name is given. + """ + single_point = SinglePointCase( + plat = self.plat, + plon = self.plon, + site_name = self.site_name, + create_domain = self.create_domain, + create_surfdata = self.create_surfdata, + create_landuse = self.create_landuse, + create_datm = self.create_datm, + create_user_mods = self.create_user_mods, + dom_pft = self.dom_pft, + pct_pft = self.pct_pft, + num_pft = self.num_pft, + include_nonveg = self.include_nonveg, + uni_snow = self.uni_snow, + cap_saturation = self.cap_saturation, + out_dir = self.out_dir, + ) + single_point.site_name = "foo" + single_point.create_tag() + self.assertEqual(single_point.tag,"foo" ) + + def test_check_dom_pft_too_big(self): + """ + Test check_dom_pft + When one of the given dom_pft(s) are bigger than 78 + """ + single_point = SinglePointCase( + plat = self.plat, + plon = self.plon, + site_name = self.site_name, + create_domain = self.create_domain, + create_surfdata = self.create_surfdata, + create_landuse = self.create_landuse, + create_datm = self.create_datm, + create_user_mods = self.create_user_mods, + dom_pft = self.dom_pft, + pct_pft = self.pct_pft, + num_pft = self.num_pft, + include_nonveg = self.include_nonveg, + uni_snow = self.uni_snow, + cap_saturation = self.cap_saturation, + out_dir = self.out_dir, + ) + single_point.dom_pft = [16,36,79] + with self.assertRaisesRegex( argparse.ArgumentTypeError, "values for --dompft should*"): + single_point.check_dom_pft() + + + def test_check_dom_pft_too_small(self): + """ + Test check_dom_pft + When one of the given dom_pft(s) are bigger than 1 + """ + single_point = SinglePointCase( + plat = self.plat, + plon = self.plon, + site_name = self.site_name, + create_domain = self.create_domain, + create_surfdata = self.create_surfdata, + create_landuse = self.create_landuse, + create_datm = self.create_datm, + create_user_mods = self.create_user_mods, + dom_pft = self.dom_pft, + pct_pft = self.pct_pft, + num_pft = self.num_pft, + include_nonveg = self.include_nonveg, + uni_snow = self.uni_snow, + cap_saturation = self.cap_saturation, + out_dir = self.out_dir, + ) + single_point.dom_pft = [16,36,0] + with self.assertRaisesRegex( argparse.ArgumentTypeError, "values for --dompft should*"): + single_point.check_dom_pft() + + + def test_check_dom_pft_numpft(self): + """ + Test check_dom_pft + When dom_pft < 16 but no crop (aka num_pft <16) + """ + single_point = SinglePointCase( + plat = self.plat, + plon = self.plon, + site_name = self.site_name, + 
create_domain = self.create_domain, + create_surfdata = self.create_surfdata, + create_landuse = self.create_landuse, + create_datm = self.create_datm, + create_user_mods = self.create_user_mods, + dom_pft = self.dom_pft, + pct_pft = self.pct_pft, + num_pft = self.num_pft, + include_nonveg = self.include_nonveg, + uni_snow = self.uni_snow, + cap_saturation = self.cap_saturation, + out_dir = self.out_dir, + ) + single_point.dom_pft = [16, 53] + single_point.num_pft = 16 + with self.assertRaisesRegex( argparse.ArgumentTypeError, "Please use --crop*"): + single_point.check_dom_pft() + + + def test_check_dom_pft_mixed_range(self): + """ + Test check_dom_pft + Test if all dom_pft(s) are in the same range of either 1-15 or 16-78 + """ + single_point = SinglePointCase( + plat = self.plat, + plon = self.plon, + site_name = self.site_name, + create_domain = self.create_domain, + create_surfdata = self.create_surfdata, + create_landuse = self.create_landuse, + create_datm = self.create_datm, + create_user_mods = self.create_user_mods, + dom_pft = self.dom_pft, + pct_pft = self.pct_pft, + num_pft = self.num_pft, + include_nonveg = self.include_nonveg, + uni_snow = self.uni_snow, + cap_saturation = self.cap_saturation, + out_dir = self.out_dir, + ) + single_point.dom_pft = [1,5,16] + with self.assertRaisesRegex( argparse.ArgumentTypeError, "mixed land units is not possible*"): + single_point.check_dom_pft() + + def test_check_nonveg_nodompft(self): + """ + Test check_nonveg + If include_nonveg =False and no dompft it should complain. + """ + single_point = SinglePointCase( + plat = self.plat, + plon = self.plon, + site_name = self.site_name, + create_domain = self.create_domain, + create_surfdata = self.create_surfdata, + create_landuse = self.create_landuse, + create_datm = self.create_datm, + create_user_mods = self.create_user_mods, + dom_pft = self.dom_pft, + pct_pft = self.pct_pft, + num_pft = self.num_pft, + include_nonveg = self.include_nonveg, + uni_snow = self.uni_snow, + cap_saturation = self.cap_saturation, + out_dir = self.out_dir, + ) + single_point.dom_pft = None + single_point.include_nonveg = False + with self.assertRaisesRegex( argparse.ArgumentTypeError, "To include non-veg land units, you need to specify*"): + single_point.check_nonveg() + + + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() From dad831b1e28b20c78e955e27a8938f53777f638f Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 26 Jan 2022 23:53:52 -0700 Subject: [PATCH 131/223] unit tests for single_point_case.py --- python/ctsm/test/test_unit_singlept_data.py | 82 +++++++++++++++++++++ python/ctsm/test/test_unit_subset_data.py | 47 ------------ 2 files changed, 82 insertions(+), 47 deletions(-) delete mode 100644 python/ctsm/test/test_unit_subset_data.py diff --git a/python/ctsm/test/test_unit_singlept_data.py b/python/ctsm/test/test_unit_singlept_data.py index 1ef34fc483..1d9038817d 100644 --- a/python/ctsm/test/test_unit_singlept_data.py +++ b/python/ctsm/test/test_unit_singlept_data.py @@ -230,6 +230,88 @@ def test_check_nonveg_nodompft(self): with self.assertRaisesRegex( argparse.ArgumentTypeError, "To include non-veg land units, you need to specify*"): single_point.check_nonveg() + def test_check_pct_pft_notsamenumbers(self): + """ + Test check_pct_pft + Check if pct_pft is the same length as dom_pft + """ + single_point = SinglePointCase( + plat = self.plat, + plon = self.plon, + site_name = self.site_name, + create_domain = self.create_domain, + create_surfdata = 
self.create_surfdata, + create_landuse = self.create_landuse, + create_datm = self.create_datm, + create_user_mods = self.create_user_mods, + dom_pft = self.dom_pft, + pct_pft = self.pct_pft, + num_pft = self.num_pft, + include_nonveg = self.include_nonveg, + uni_snow = self.uni_snow, + cap_saturation = self.cap_saturation, + out_dir = self.out_dir, + ) + single_point.dom_pft = [1, 5] + single_point.pct_pft = [0.5] + with self.assertRaisesRegex( argparse.ArgumentTypeError, "Please provide the same number of inputs*"): + single_point.check_pct_pft() + + + def test_check_pct_pft_sum_not1(self): + """ + Test check_pct_pft + Check if pct_pft adds up to 1 or 100. + """ + single_point = SinglePointCase( + plat = self.plat, + plon = self.plon, + site_name = self.site_name, + create_domain = self.create_domain, + create_surfdata = self.create_surfdata, + create_landuse = self.create_landuse, + create_datm = self.create_datm, + create_user_mods = self.create_user_mods, + dom_pft = self.dom_pft, + pct_pft = self.pct_pft, + num_pft = self.num_pft, + include_nonveg = self.include_nonveg, + uni_snow = self.uni_snow, + cap_saturation = self.cap_saturation, + out_dir = self.out_dir, + ) + single_point.dom_pft = [1, 5] + single_point.pct_pft = [0.1,0.5] + with self.assertRaisesRegex( argparse.ArgumentTypeError, "Sum of --pctpft values should be equal to 1*"): + single_point.check_pct_pft() + + + def test_check_pct_pft_fraction_topct(self): + """ + Test check_pct_pft + Check if pct_pft is corretly converted to percent. + """ + single_point = SinglePointCase( + plat = self.plat, + plon = self.plon, + site_name = self.site_name, + create_domain = self.create_domain, + create_surfdata = self.create_surfdata, + create_landuse = self.create_landuse, + create_datm = self.create_datm, + create_user_mods = self.create_user_mods, + dom_pft = self.dom_pft, + pct_pft = self.pct_pft, + num_pft = self.num_pft, + include_nonveg = self.include_nonveg, + uni_snow = self.uni_snow, + cap_saturation = self.cap_saturation, + out_dir = self.out_dir, + ) + single_point.dom_pft = [1, 5, 8] + single_point.pct_pft = [0.5, 0.4, 0.1] + single_point.check_pct_pft() + self.assertEqual(single_point.pct_pft,[50,40,10] ) diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py deleted file mode 100644 index 4276343c4e..0000000000 --- a/python/ctsm/test/test_unit_subset_data.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python3 -""" -Unit tests for subset_data -""" - -import unittest -import argparse - -from ctsm.subset_data import plon_type -from ctsm import unit_testing - -# Allow names that pylint doesn't like, because otherwise I find it hard -# to make readable unit test names -# pylint: disable=invalid-name - - -class TestSubsetData(unittest.TestCase): - """ - Tests for subset_data - """ - def test_plonType_positive(self): - """ - Test plot_type with 30 - """ - result = plon_type(30) - self.assertEqual(result, 30.0) - - def test_plonType_negative(self): - """ - Test plot_type with -30 - """ - result = plon_type(-30) - self.assertEqual(result, 330.0) - - def test_plonType_outOfBounds(self): - """ - Test plot_type with 361 - """ - with self.assertRaisesRegex( - argparse.ArgumentTypeError, "Latitude.*should be between" - ): - _ = plon_type(361) - - -if __name__ == "__main__": - unit_testing.setup_for_tests() - unittest.main() From 5a2aba14ff5cd59789ac5a5712a4e25b8a6bc465 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 26 Jan 2022 23:55:11 -0700 Subject: [PATCH 132/223] few fixes 
regarding pfts --- python/ctsm/site_and_regional/regional_case.py | 3 ++- python/ctsm/site_and_regional/single_point_case.py | 11 ++++++----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 84206dc283..322f9494b9 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -212,5 +212,6 @@ def create_landuse_at_reg(self, indir, file, user_mods_dir): if self.create_user_mods: with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm: - line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, fluse_out)) + #line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, fluse_out)) + line = "flanduse_timeseries = '${}'".format(os.path.join(USRDAT_DIR, fluse_out)) self.write_to_file(line, nl_clm) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 45b62462a2..55ae85a581 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -222,7 +222,7 @@ def check_nonveg (self): You should specify at least one of the following arguments: --dompft DOMPFT - --include_nonveg + --include-nonveg """ raise argparse.ArgumentTypeError(err_msg) @@ -389,11 +389,12 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): #-- modify surface data properties if self.dom_pft is not None: #-- First initialize everything: - f_out["PCT_NAT_PFT"][:, :, 0] = 100 - #f_out["PCT_NATVEG"][:, :] = 0 + #f_out["PCT_NAT_PFT"][:, :, 0] = 100 + ##f_out["PCT_NATVEG"][:, :] = 0 - f_out["PCT_CFT"][:, :, 0] = 100 - #f_out["PCT_CROP"][:, :] = 0 + #f_out["PCT_CFT"][:, :, 0] = 100 + ##f_out["PCT_CROP"][:, :] = 0 + f_out ["PCT_NAT_PFT"] = 0 #-- loop over all dom_pft and pct_pft zip_pfts = zip (self.dom_pft, self.pct_pft) From fc9c6db03fd21ecb2daecf7b67c3a8e814d2773b Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 26 Jan 2022 23:58:01 -0700 Subject: [PATCH 133/223] remove duplicated function. --- tools/site_and_regional/run_neon.py | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/tools/site_and_regional/run_neon.py b/tools/site_and_regional/run_neon.py index a9d31b08e9..6c4e8696fd 100755 --- a/tools/site_and_regional/run_neon.py +++ b/tools/site_and_regional/run_neon.py @@ -633,29 +633,6 @@ def download_file(url, fname): print ('Something went wrong in downloading', fname) print ('Error code:', err.code) - -def download_file(url, fname): - """ - Function to download a file. - - Args: - url (str): - url of the file for downloading - - fname (str) : - file name to save the downloaded file. 
-    """
-    response = requests.get(url)
-
-    with open(fname, 'wb') as f:
-        f.write(response.content)
-
-    #-- Check if download status_code
-    if response.status_code == 200:
-        print('Download finished successfully for', fname,'.')
-    elif response.status_code == 404:
-        print('File '+fname+'was not available on the neon server:'+ url)
-
 def main(description):
     cesmroot = path_to_ctsm_root()
     # Get the list of supported neon sites from usermods

From 6540948705081f38dec239369d71ba524af6764d Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Thu, 27 Jan 2022 01:24:04 -0700
Subject: [PATCH 134/223] cleaning up run_neon download_file

---
 python/ctsm/utils.py                | 39 +++++++++++++++++++++++++++++
 tools/site_and_regional/run_neon.py | 27 ++------------------
 2 files changed, 41 insertions(+), 25 deletions(-)

diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py
index 70bef1d951..db607b35d3 100644
--- a/python/ctsm/utils.py
+++ b/python/ctsm/utils.py
@@ -9,6 +9,8 @@
 from datetime import date
 from getpass import getuser
 
+import requests
+
 logger = logging.getLogger(__name__)
 
 def abort(errmsg):
@@ -123,3 +125,40 @@ def update_metadata(file, title, summary, contact, data_script, description):
     for attr in del_attrs:
         if attr in attr_list:
             del file.attrs[attr]
+
+def download_file(url, fname):
+    """
+    Function to download a file.
+    Args:
+        url (str):
+            url of the file for downloading
+        fname (str) :
+            file name to save the downloaded file.
+
+    Raises:
+        Error :
+            When the file is not available on the server (status_code:404)
+        Error:
+            When download fails for any reason.
+    """
+    try:
+        response = requests.get(url)
+
+        with open(fname, "wb") as this_f:
+            this_f.write(response.content)
+
+        # -- Check if download status_code
+        if response.status_code == 200:
+            logger.info("Download finished successfully for: %s", fname)
+
+        elif response.status_code == 404:
+            logger.warning ('This file is not available on the server: %s', fname)
+            err_msg = "Couldn't download file "+fname +"-- Error code: "+ "404"
+            abort(err_msg)
+
+    # pylint: disable=broad-except
+    except Exception as err:
+        logger.warning ('The server could not fulfill the request.')
+        logger.warning ('Something went wrong in downloading: %s', fname)
+        err_msg = "Couldn't download file "+fname +"-- Error code:"+ str(err)
+        abort(err_msg)
diff --git a/tools/site_and_regional/run_neon.py b/tools/site_and_regional/run_neon.py
index 6c4e8696fd..2d98f66379 100755
--- a/tools/site_and_regional/run_neon.py
+++ b/tools/site_and_regional/run_neon.py
@@ -76,6 +76,7 @@
 
 from ctsm import add_cime_to_path
 from ctsm.path_utils import path_to_ctsm_root
+from ctsm.utils import download_file
 
 import CIME.build as build
 from standard_script_setup import *
@@ -191,7 +192,7 @@ def get_parser(args, description, valid_neon_sites):
                 [default: %(default)s]
                 ''',
                 action="store",
-                dest="start_date", 
+                dest="start_date",
                 required = False,
                 type = datetime.date.fromisoformat,
                 default = datetime.datetime.strptime("2018-01-01",'%Y-%m-%d'))
@@ -608,30 +609,6 @@ def parse_neon_listing(listing_file, valid_neon_sites):
 
     return available_list
 
-def download_file(url, fname):
-    """
-    Function to download a file.
-    Args:
-        url (str):
-            url of the file for downloading
-        fname (str) :
-            file name to save the downloaded file.
- """ - try: - response = requests.get(url) - - with open(fname, "wb") as f: - f.write(response.content) - - # -- Check if download status_code - if response.status_code == 200: - print("Download finished successfully for", fname, ".") - elif response.status_code == 404: - print("File " + fname + "was not available on the neon server:" + url) - except Exception as err: - print ('The server could not fulfill the request.') - print ('Something went wrong in downloading', fname) - print ('Error code:', err.code) def main(description): cesmroot = path_to_ctsm_root() From 4d22aedbcbb338011820df985b2f744c9b414140 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 27 Jan 2022 01:25:19 -0700 Subject: [PATCH 135/223] cleaned up formatting of run_neon. --- tools/site_and_regional/run_neon.py | 689 ++++++++++++++++------------ 1 file changed, 406 insertions(+), 283 deletions(-) diff --git a/tools/site_and_regional/run_neon.py b/tools/site_and_regional/run_neon.py index 2d98f66379..378ea6d426 100755 --- a/tools/site_and_regional/run_neon.py +++ b/tools/site_and_regional/run_neon.py @@ -1,5 +1,5 @@ #! /usr/bin/env python3 - + """ |------------------------------------------------------------------| |--------------------- Instructions -----------------------------| @@ -40,25 +40,25 @@ ./run_neon.py --help ------------------------------------------------------------------- """ -#TODO (NS) -#- [ ] -#- [ ] Case dependency and the ability to check case status -#- [ ] If Case dependency works we don't need finidat given explicilty for post-ad and transient. +# TODO (NS) +# - [ ] +# - [ ] Case dependency and the ability to check case status +# - [ ] If Case dependency works we don't need finidat given explicilty for post-ad and transient. -#- [ ] checkout_externals instead of using env varaiable -#- [ ] wget the fields available and run for those available - -#- [ ] Matrix spin-up if (SASU) Eric merged it in -#- [ ] Make sure both AD and SASU are not on at the same time +# - [ ] checkout_externals instead of using env varaiable +# - [ ] wget the fields available and run for those available -#- [ ] Make sure CIME and other dependencies is checked out. +# - [ ] Matrix spin-up if (SASU) Eric merged it in +# - [ ] Make sure both AD and SASU are not on at the same time + +# - [ ] Make sure CIME and other dependencies is checked out. - -#Import libraries + +# Import libraries import os import sys -import time +import time import shutil import logging import requests @@ -66,12 +66,14 @@ import re import subprocess import pandas as pd -import glob +import glob import datetime from getpass import getuser - + # Get the ctsm util tools and then the cime tools. 
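 # (this puts python/ctsm on sys.path so the ctsm.* imports below resolve)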
-_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..","..",'python')) +_CTSM_PYTHON = os.path.abspath( + os.path.join(os.path.dirname(__file__), "..", "..", "python") +) sys.path.insert(1, _CTSM_PYTHON) from ctsm import add_cime_to_path @@ -80,146 +82,172 @@ import CIME.build as build from standard_script_setup import * -from CIME.case import Case -from CIME.utils import safe_copy, expect, symlink_force -from argparse import RawTextHelpFormatter -from CIME.locked_files import lock_file, unlock_file +from CIME.case import Case +from CIME.utils import safe_copy, expect, symlink_force +from argparse import RawTextHelpFormatter +from CIME.locked_files import lock_file, unlock_file -logger = logging.getLogger(__name__) - -def get_parser(args, description, valid_neon_sites): +logger = logging.getLogger(__name__) + + +def get_parser(args, description, valid_neon_sites): """ Get parser object for this script. """ - parser = argparse.ArgumentParser(description=description, - formatter_class=argparse.RawDescriptionHelpFormatter) + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) CIME.utils.setup_standard_logging_options(parser) - + parser.print_usage = parser.print_help - parser.add_argument('--neon-sites', - help='4-letter neon site code.', - action="store", - required=False, - choices=valid_neon_sites + ['all'], - dest="neon_sites", - default=["OSBS"], - nargs='+') - - parser.add_argument('--base-case', - help=''' + parser.add_argument( + "--neon-sites", + help="4-letter neon site code.", + action="store", + required=False, + choices=valid_neon_sites + ["all"], + dest="neon_sites", + default=["OSBS"], + nargs="+", + ) + + parser.add_argument( + "--base-case", + help=""" Root Directory of base case build [default: %(default)s] - ''', - action="store", - dest="base_case_root", - type =str, - required=False, - default=None) - - parser.add_argument('--output-root', - help=''' + """, + action="store", + dest="base_case_root", + type=str, + required=False, + default=None, + ) + + parser.add_argument( + "--output-root", + help=""" Root output directory of cases [default: %(default)s] - ''', - action="store", - dest="output_root", - type =str, - required=False, - default="CIME_OUTPUT_ROOT as defined in cime") - - parser.add_argument('--overwrite', - help=''' + """, + action="store", + dest="output_root", + type=str, + required=False, + default="CIME_OUTPUT_ROOT as defined in cime", + ) + + parser.add_argument( + "--overwrite", + help=""" overwrite existing case directories [default: %(default)s] - ''', - action="store_true", - dest="overwrite", - required = False, - default = False) - - parser.add_argument('--setup-only', - help=''' + """, + action="store_true", + dest="overwrite", + required=False, + default=False, + ) + + parser.add_argument( + "--setup-only", + help=""" Only setup the requested cases, do not build or run [default: %(default)s] - ''', - action="store_true", - dest="setup_only", - required = False, - default = False) - - parser.add_argument('--rerun', - help=''' + """, + action="store_true", + dest="setup_only", + required=False, + default=False, + ) + + parser.add_argument( + "--rerun", + help=""" If the case exists but does not appear to be complete, restart it. 
[default: %(default)s] - ''', - action="store_true", - dest="rerun", - required = False, - default = False) - - parser.add_argument('--no-batch', - help=''' + """, + action="store_true", + dest="rerun", + required=False, + default=False, + ) + + parser.add_argument( + "--no-batch", + help=""" Run locally, do not use batch queueing system (if defined for Machine) [default: %(default)s] - ''', - action="store_true", - dest="no_batch", - required = False, - default = False) - - parser.add_argument('--run-type', - help=''' + """, + action="store_true", + dest="no_batch", + required=False, + default=False, + ) + + parser.add_argument( + "--run-type", + help=""" Type of run to do [default: %(default)s] - ''', - choices = ["ad", "postad", "transient", "sasu"], - default = "transient") - - parser.add_argument ('--run-length', - help=''' + """, + choices=["ad", "postad", "transient", "sasu"], + default="transient", + ) + + parser.add_argument( + "--run-length", + help=""" How long to run (modified ISO 8601 duration) [default: %(default)s] - ''', - required = False, - type = str, - default = '0Y') - - parser.add_argument('--start-date', - help=''' + """, + required=False, + type=str, + default="0Y", + ) + + parser.add_argument( + "--start-date", + help=""" Start date for running CTSM simulation in ISO format. [default: %(default)s] - ''', - action="store", - dest="start_date", - required = False, - type = datetime.date.fromisoformat, - default = datetime.datetime.strptime("2018-01-01",'%Y-%m-%d')) - - parser.add_argument('--end-date', - help=''' + """, + action="store", + dest="start_date", + required=False, + type=datetime.date.fromisoformat, + default=datetime.datetime.strptime("2018-01-01", "%Y-%m-%d"), + ) + + parser.add_argument( + "--end-date", + help=""" End date for running CTSM simulation in ISO format. [default: %(default)s] - ''', - action="store", - dest="end_date", - required = False, - type = datetime.date.fromisoformat, - default = datetime.datetime.strptime("2021-01-01",'%Y-%m-%d')) - - parser.add_argument('--run-from-postad', - help=''' + """, + action="store", + dest="end_date", + required=False, + type=datetime.date.fromisoformat, + default=datetime.datetime.strptime("2021-01-01", "%Y-%m-%d"), + ) + + parser.add_argument( + "--run-from-postad", + help=""" For transient runs only - should we start from the postad spinup or finidat? By default start from finidat, if this flag is used the postad run must be available. 
- ''', - action="store_true", - required = False, - default = False) + """, + action="store_true", + required=False, + default=False, + ) args = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser) - if 'all' in args.neon_sites: + if "all" in args.neon_sites: neon_sites = valid_neon_sites else: neon_sites = args.neon_sites @@ -230,15 +258,15 @@ def get_parser(args, description, valid_neon_sites): if "CIME_OUTPUT_ROOT" in args.output_root: args.output_root = None - if args.run_length == '0Y': - if args.run_type == 'ad': - run_length = '200Y' - elif args.run_type == 'postad': - run_length = '50Y' + if args.run_length == "0Y": + if args.run_type == "ad": + run_length = "200Y" + elif args.run_type == "postad": + run_length = "50Y" else: # The transient run length is set by cdeps atm buildnml to the last date of the available tower data # this value is not used - run_length = '4Y' + run_length = "4Y" run_length = parse_isoduration(run_length) base_case_root = None @@ -250,7 +278,19 @@ def get_parser(args, description, valid_neon_sites): root_logger = logging.getLogger() root_logger.setLevel(logging.WARN) - return neon_sites, args.output_root, args.run_type, args.overwrite, run_length, base_case_root, args.run_from_postad, args.setup_only, args.no_batch, args.rerun + return ( + neon_sites, + args.output_root, + args.run_type, + args.overwrite, + run_length, + base_case_root, + args.run_from_postad, + args.setup_only, + args.no_batch, + args.rerun, + ) + def get_isosplit(s, split): if split in s: @@ -259,92 +299,109 @@ def get_isosplit(s, split): n = 0 return n, s + def parse_isoduration(s): - ''' + """ simple ISO 8601 duration parser, does not account for leap years and assumes 30 day months - ''' + """ # Remove prefix - s = s.split('P')[-1] - + s = s.split("P")[-1] + # Step through letter dividers - years, s = get_isosplit(s, 'Y') - months, s = get_isosplit(s, 'M') - days, s = get_isosplit(s, 'D') - + years, s = get_isosplit(s, "Y") + months, s = get_isosplit(s, "M") + days, s = get_isosplit(s, "D") + # Convert all to timedelta - dt = datetime.timedelta(days=int(days)+365*int(years)+30*int(months)) - return int(dt.total_seconds()/86400) + dt = datetime.timedelta(days=int(days) + 365 * int(years) + 30 * int(months)) + return int(dt.total_seconds() / 86400) -class NeonSite : + +class NeonSite: """ A class for encapsulating neon sites. - + ... - + Attributes ---------- - + Methods ------- """ + def __init__(self, name, start_year, end_year, start_month, end_month, finidat): self.name = name - self.start_year= int(start_year) + self.start_year = int(start_year) self.end_year = int(end_year) self.start_month = int(start_month) self.end_month = int(end_month) self.cesmroot = path_to_ctsm_root() self.finidat = finidat - - def __str__(self): - return str(self.__class__) + '\n' + '\n'.join((str(item) + ' = ' - for item in (self.__dict__))) - def build_base_case(self, cesmroot, output_root, res, compset, overwrite=False, setup_only=False): + def __str__(self): + return ( + str(self.__class__) + + "\n" + + "\n".join((str(item) + " = " for item in (self.__dict__))) + ) + + def build_base_case( + self, cesmroot, output_root, res, compset, overwrite=False, setup_only=False + ): """ Function for building a base_case to clone. 
To spend less time on building ctsm for the neon cases, all the other cases are cloned from this case - + Args: - self: + self: The NeonSite object - base_root (str): - root of the base_case CIME + base_root (str): + root of the base_case CIME res (str): base_case resolution or gridname compset (str): base case compset - overwrite (bool) : + overwrite (bool) : Flag to overwrite the case if exists """ print("---- building a base case -------") self.base_case_root = output_root - user_mods_dirs = [os.path.join(cesmroot,"cime_config","usermods_dirs","NEON",self.name)] + user_mods_dirs = [ + os.path.join(cesmroot, "cime_config", "usermods_dirs", "NEON", self.name) + ] if not output_root: output_root = os.getcwd() - case_path = os.path.join(output_root,self.name) - - logger.info ('base_case_name : {}'.format(self.name)) - logger.info ('user_mods_dir : {}'.format(user_mods_dirs[0])) + case_path = os.path.join(output_root, self.name) + + logger.info("base_case_name : {}".format(self.name)) + logger.info("user_mods_dir : {}".format(user_mods_dirs[0])) if overwrite and os.path.isdir(case_path): - print ("Removing the existing case at: {}".format(case_path)) + print("Removing the existing case at: {}".format(case_path)) shutil.rmtree(case_path) with Case(case_path, read_only=False) as case: if not os.path.isdir(case_path): print("---- creating a base case -------") - case.create(case_path, cesmroot, compset, res, - run_unsupported=True, answer="r",output_root=output_root, - user_mods_dirs = user_mods_dirs, driver="nuopc") + case.create( + case_path, + cesmroot, + compset, + res, + run_unsupported=True, + answer="r", + output_root=output_root, + user_mods_dirs=user_mods_dirs, + driver="nuopc", + ) print("---- base case created ------") - #--change any config for base_case: - #case.set_value("RUN_TYPE","startup") - + # --change any config for base_case: + # case.set_value("RUN_TYPE","startup") print("---- base case setup ------") case.case_setup() @@ -356,26 +413,42 @@ def build_base_case(self, cesmroot, output_root, res, compset, overwrite=False, return case_path print("---- base case build ------") - # always walk through the build process to make sure it's up to date. + # always walk through the build process to make sure it's up to date. 
t0 = time.time() build.case_build(case_path, case=case) t1 = time.time() - total = t1-t0 - print ("Time required to building the base case: {} s.".format(total)) + total = t1 - t0 + print("Time required to building the base case: {} s.".format(total)) # update case_path to be the full path to the base case return case_path def diff_month(self): - d1 = datetime.datetime(self.end_year,self.end_month, 1) + d1 = datetime.datetime(self.end_year, self.end_month, 1) d2 = datetime.datetime(self.start_year, self.start_month, 1) return (d1.year - d2.year) * 12 + d1.month - d2.month - - - def run_case(self, base_case_root, run_type, run_length, overwrite=False, setup_only=False, no_batch=False, rerun=False): - user_mods_dirs = [os.path.join(self.cesmroot,"cime_config","usermods_dirs","NEON",self.name)] - expect(os.path.isdir(base_case_root), "Error base case does not exist in {}".format(base_case_root)) - case_root = os.path.abspath(os.path.join(base_case_root,"..", self.name+"."+run_type)) + def run_case( + self, + base_case_root, + run_type, + run_length, + overwrite=False, + setup_only=False, + no_batch=False, + rerun=False, + ): + user_mods_dirs = [ + os.path.join( + self.cesmroot, "cime_config", "usermods_dirs", "NEON", self.name + ) + ] + expect( + os.path.isdir(base_case_root), + "Error base case does not exist in {}".format(base_case_root), + ) + case_root = os.path.abspath( + os.path.join(base_case_root, "..", self.name + "." + run_type) + ) rundir = None if os.path.isdir(case_root): if overwrite: @@ -384,78 +457,88 @@ def run_case(self, base_case_root, run_type, run_length, overwrite=False, setup_ elif rerun: with Case(case_root, read_only=False) as case: rundir = case.get_value("RUNDIR") - if os.path.isfile(os.path.join(rundir,"ESMF_Profile.summary")): - print("Case {} appears to be complete, not rerunning.".format(case_root)) + if os.path.isfile(os.path.join(rundir, "ESMF_Profile.summary")): + print( + "Case {} appears to be complete, not rerunning.".format( + case_root + ) + ) elif not setup_only: print("Resubmitting case {}".format(case_root)) case.submit(no_batch=no_batch) return else: - logger.warning("Case already exists in {}, not overwritting.".format(case_root)) + logger.warning( + "Case already exists in {}, not overwritting.".format(case_root) + ) return if run_type == "postad": - adcase_root = case_root.replace('.postad','.ad') + adcase_root = case_root.replace(".postad", ".ad") if not os.path.isdir(adcase_root): - logger.warning("postad requested but no ad case found in {}".format(adcase_root)) + logger.warning( + "postad requested but no ad case found in {}".format(adcase_root) + ) return if not os.path.isdir(case_root): # read_only = False should not be required here with Case(base_case_root, read_only=False) as basecase: print("---- cloning the base case in {}".format(case_root)) - basecase.create_clone(case_root, keepexe=True, user_mods_dirs=user_mods_dirs) + basecase.create_clone( + case_root, keepexe=True, user_mods_dirs=user_mods_dirs + ) with Case(case_root, read_only=False) as case: # in order to avoid the complication of leap years we always set the run_length in units of days. 
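             # (e.g. the default 200Y ad-spinup run_length arrives here as
             # 73000 days, since parse_isoduration assumes 365-day years
             # and 30-day months)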
case.set_value("STOP_OPTION", "ndays") case.set_value("STOP_N", run_length) - case.set_value("REST_OPTION","end") + case.set_value("REST_OPTION", "end") case.set_value("CONTINUE_RUN", False) - + if run_type == "ad": - case.set_value("CLM_FORCE_COLDSTART","on") - case.set_value("CLM_ACCELERATED_SPINUP","on") + case.set_value("CLM_FORCE_COLDSTART", "on") + case.set_value("CLM_ACCELERATED_SPINUP", "on") case.set_value("RUN_REFDATE", "0018-01-01") - case.set_value("RUN_STARTDATE", "0018-01-01") + case.set_value("RUN_STARTDATE", "0018-01-01") else: - case.set_value("CLM_FORCE_COLDSTART","off") - case.set_value("CLM_ACCELERATED_SPINUP","off") + case.set_value("CLM_FORCE_COLDSTART", "off") + case.set_value("CLM_ACCELERATED_SPINUP", "off") case.set_value("RUN_TYPE", "hybrid") - + if run_type == "postad": self.set_ref_case(case) - + if run_type == "transient": if self.finidat: - case.set_value("RUN_TYPE","startup") + case.set_value("RUN_TYPE", "startup") else: if not self.set_ref_case(case): return - case.set_value("STOP_OPTION","nmonths") + case.set_value("STOP_OPTION", "nmonths") case.set_value("STOP_N", self.diff_month()) - case.set_value("DATM_YR_ALIGN",self.start_year) - case.set_value("DATM_YR_START",self.start_year) - case.set_value("DATM_YR_END",self.end_year) - case.set_value("CALENDAR","GREGORIAN") + case.set_value("DATM_YR_ALIGN", self.start_year) + case.set_value("DATM_YR_START", self.start_year) + case.set_value("DATM_YR_END", self.end_year) + case.set_value("CALENDAR", "GREGORIAN") else: # for the spinup we want the start and end on year boundaries if self.start_month == 1: - case.set_value("DATM_YR_ALIGN",self.start_year) - case.set_value("DATM_YR_START",self.start_year) + case.set_value("DATM_YR_ALIGN", self.start_year) + case.set_value("DATM_YR_START", self.start_year) elif self.start_year + 1 <= self.end_year: - case.set_value("DATM_YR_ALIGN",self.start_year+1) - case.set_value("DATM_YR_START",self.start_year+1) + case.set_value("DATM_YR_ALIGN", self.start_year + 1) + case.set_value("DATM_YR_START", self.start_year + 1) if self.end_month == 12: - case.set_value("DATM_YR_END",self.end_year) + case.set_value("DATM_YR_END", self.end_year) else: - case.set_value("DATM_YR_END",self.end_year-1) + case.set_value("DATM_YR_END", self.end_year - 1) if not rundir: rundir = case.get_value("RUNDIR") self.modify_user_nl(case_root, run_type, rundir) - + case.create_namelists() # explicitly run check_input_data case.check_all_input_data() @@ -466,25 +549,31 @@ def set_ref_case(self, case): rundir = case.get_value("RUNDIR") case_root = case.get_value("CASEROOT") if case_root.endswith(".postad"): - ref_case_root = case_root.replace(".postad",".ad") + ref_case_root = case_root.replace(".postad", ".ad") root = ".ad" else: - ref_case_root = case_root.replace(".transient",".postad") + ref_case_root = case_root.replace(".transient", ".postad") root = ".postad" if not os.path.isdir(ref_case_root): - logger.warning("ERROR: spinup must be completed first, could not find directory {}".format(ref_case_root)) + logger.warning( + "ERROR: spinup must be completed first, could not find directory {}".format( + ref_case_root + ) + ) return False - + with Case(ref_case_root) as refcase: refrundir = refcase.get_value("RUNDIR") case.set_value("RUN_REFDIR", refrundir) case.set_value("RUN_REFCASE", os.path.basename(ref_case_root)) refdate = None - for reffile in glob.iglob(refrundir + "/{}{}.clm2.r.*.nc".format(self.name, root)): + for reffile in glob.iglob( + refrundir + "/{}{}.clm2.r.*.nc".format(self.name, root) + 
): m = re.search("(\d\d\d\d-\d\d-\d\d)-\d\d\d\d\d.nc", reffile) if m: refdate = m.group(1) - symlink_force(reffile, os.path.join(rundir,os.path.basename(reffile))) + symlink_force(reffile, os.path.join(rundir, os.path.basename(reffile))) logger.info("Found refdate of {}".format(refdate)) if not refdate: logger.warning("Could not find refcase for {}".format(case_root)) @@ -492,161 +581,195 @@ def set_ref_case(self, case): for rpfile in glob.iglob(refrundir + "/rpointer*"): safe_copy(rpfile, rundir) - if not os.path.isdir(os.path.join(rundir, "inputdata")) and os.path.isdir(os.path.join(refrundir,"inputdata")): - symlink_force(os.path.join(refrundir,"inputdata"),os.path.join(rundir,"inputdata")) - + if not os.path.isdir(os.path.join(rundir, "inputdata")) and os.path.isdir( + os.path.join(refrundir, "inputdata") + ): + symlink_force( + os.path.join(refrundir, "inputdata"), os.path.join(rundir, "inputdata") + ) case.set_value("RUN_REFDATE", refdate) if case_root.endswith(".postad"): case.set_value("RUN_STARTDATE", refdate) else: - case.set_value("RUN_STARTDATE", "{yr:04d}-{mo:02d}-01".format(yr=self.start_year, mo=self.start_month)) + case.set_value( + "RUN_STARTDATE", + "{yr:04d}-{mo:02d}-01".format(yr=self.start_year, mo=self.start_month), + ) return True - + def modify_user_nl(self, case_root, run_type, rundir): user_nl_fname = os.path.join(case_root, "user_nl_clm") user_nl_lines = None if run_type == "transient": if self.finidat: - user_nl_lines = ["finidat = '{}/inputdata/lnd/ctsm/initdata/{}'".format(rundir,self.finidat)] + user_nl_lines = [ + "finidat = '{}/inputdata/lnd/ctsm/initdata/{}'".format( + rundir, self.finidat + ) + ] else: user_nl_lines = [ "hist_fincl2 = ''", "hist_mfilt = 20", "hist_nhtfrq = -8760", "hist_empty_htapes = .true.", - "hist_fincl1 = 'TOTECOSYSC', 'TOTECOSYSN', 'TOTSOMC', 'TOTSOMN', 'TOTVEGC', 'TOTVEGN', 'TLAI', 'GPP', 'CPOOL', 'NPP', 'TWS', 'H2OSNO'"] - + "hist_fincl1 = 'TOTECOSYSC', 'TOTECOSYSN', 'TOTSOMC', 'TOTSOMN', 'TOTVEGC', 'TOTVEGN', 'TLAI', 'GPP', 'CPOOL', 'NPP', 'TWS', 'H2OSNO'", + ] + if user_nl_lines: with open(user_nl_fname, "a") as fd: for line in user_nl_lines: fd.write("{}\n".format(line)) - def check_neon_listing(valid_neon_sites): """ A function to download and parse neon listing file. """ - listing_file = 'listing.csv' - url = 'https://neon-ncar.s3.data.neonscience.org/listing.csv' - + listing_file = "listing.csv" + url = "https://neon-ncar.s3.data.neonscience.org/listing.csv" + download_file(url, listing_file) - available_list= parse_neon_listing(listing_file, valid_neon_sites) + available_list = parse_neon_listing(listing_file, valid_neon_sites) return available_list + def parse_neon_listing(listing_file, valid_neon_sites): """ A function to parse neon listing file and find neon sites with the dates where data is available. - + Args: listing_file (str): downloaded listing file - + Returns: available_list : list of neon_site objects that is found on the downloaded listing file. 
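 
        Note: site names are taken from column 7 of the "/"-split object
        paths; only atm/cdeps files named *YYYY-MM.nc are considered.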
""" - - #pd.set_option("display.max_rows", None, "display.max_columns", None) - + + # pd.set_option("display.max_rows", None, "display.max_columns", None) + available_list = [] - + df = pd.read_csv(listing_file) - + # check for finidat files for transient run - finidatlist = df[df['object'].str.contains("lnd/ctsm")] + finidatlist = df[df["object"].str.contains("lnd/ctsm")] - #-- filter lines with atm/cdep - df = df[df['object'].str.contains("atm/cdeps/")] + # -- filter lines with atm/cdep + df = df[df["object"].str.contains("atm/cdeps/")] - #-- split the object str to extract site name - df=df['object'].str.split("/", expand=True) - - #-- groupby site name + # -- split the object str to extract site name + df = df["object"].str.split("/", expand=True) + + # -- groupby site name grouped_df = df.groupby(7) for key, item in grouped_df: - #-- check if it is a valid neon site + # -- check if it is a valid neon site if any(key in x for x in valid_neon_sites): site_name = key tmp_df = grouped_df.get_group(key) - - #-- filter files only ending with YYYY-MM.nc - tmp_df = tmp_df[tmp_df[8].str.contains('\d\d\d\d-\d\d.nc')] + + # -- filter files only ending with YYYY-MM.nc + tmp_df = tmp_df[tmp_df[8].str.contains("\d\d\d\d-\d\d.nc")] latest_version = tmp_df[6].iloc[-1] tmp_df = tmp_df[tmp_df[6].str.contains(latest_version)] - #-- remove .nc from the file names - tmp_df[8] = tmp_df[8].str.replace('.nc','') - - tmp_df2 = tmp_df[8].str.split("-", expand=True) + # -- remove .nc from the file names + tmp_df[8] = tmp_df[8].str.replace(".nc", "") + + tmp_df2 = tmp_df[8].str.split("-", expand=True) # ignore any prefix in file name and just get year tmp_df2[0] = tmp_df2[0].str.slice(-4) - #-- figure out start_year and end_year + # -- figure out start_year and end_year start_year = int(tmp_df2[0].iloc[0]) end_year = int(tmp_df2[0].iloc[-1]) - - #-- figure out start_month and end_month + + # -- figure out start_month and end_month start_month = int(tmp_df2[1].iloc[0]) end_month = int(tmp_df2[1].iloc[-1]) - logger.debug ("Valid neon site " + site_name+" found!") - logger.debug ("File version {}".format(latest_version)) - logger.debug ('start_year={}'.format(start_year)) - logger.debug ('end_year={}'.format(end_year)) - logger.debug ('start_month={}'.format(start_month)) - logger.debug ('end_month={}'.format(end_month)) + logger.debug("Valid neon site " + site_name + " found!") + logger.debug("File version {}".format(latest_version)) + logger.debug("start_year={}".format(start_year)) + logger.debug("end_year={}".format(end_year)) + logger.debug("start_month={}".format(start_month)) + logger.debug("end_month={}".format(end_month)) finidat = None - for line in finidatlist['object']: + for line in finidatlist["object"]: if site_name in line: - finidat = line.split(',')[0].split('/')[-1] - - neon_site = NeonSite(site_name, start_year, end_year, start_month, end_month, finidat) - logger.debug (neon_site) + finidat = line.split(",")[0].split("/")[-1] + + neon_site = NeonSite( + site_name, start_year, end_year, start_month, end_month, finidat + ) + logger.debug(neon_site) available_list.append(neon_site) - + return available_list def main(description): cesmroot = path_to_ctsm_root() # Get the list of supported neon sites from usermods - valid_neon_sites = glob.glob(os.path.join(cesmroot,"cime_config","usermods_dirs","NEON","[!d]*")) - valid_neon_sites = sorted([v.split('/')[-1] for v in valid_neon_sites]) - - site_list, output_root, run_type, overwrite, run_length, base_case_root, run_from_postad, setup_only, no_batch, 
rerun = get_parser(sys.argv, description, valid_neon_sites) + valid_neon_sites = glob.glob( + os.path.join(cesmroot, "cime_config", "usermods_dirs", "NEON", "[!d]*") + ) + valid_neon_sites = sorted([v.split("/")[-1] for v in valid_neon_sites]) + + ( + site_list, + output_root, + run_type, + overwrite, + run_length, + base_case_root, + run_from_postad, + setup_only, + no_batch, + rerun, + ) = get_parser(sys.argv, description, valid_neon_sites) if output_root: - logger.debug ("output_root : "+ output_root) + logger.debug("output_root : " + output_root) if not os.path.exists(output_root): os.makedirs(output_root) - #-- check neon listing file for available data: + # -- check neon listing file for available data: available_list = check_neon_listing(valid_neon_sites) - #================================= - #-- all neon sites can be cloned from one generic case - #-- so no need to define a base_case for every site. + # ================================= + # -- all neon sites can be cloned from one generic case + # -- so no need to define a base_case for every site. res = "CLM_USRDAT" compset = "I1PtClm51Bgc" - #-- Looping over neon sites + # -- Looping over neon sites - for neon_site in available_list: + for neon_site in available_list: if neon_site.name in site_list: if run_from_postad: neon_site.finidat = None if not base_case_root: - base_case_root = neon_site.build_base_case(cesmroot, output_root, res, - compset, overwrite, setup_only) - logger.info ("-----------------------------------") - logger.info ("Running CTSM for neon site : {}".format(neon_site.name)) - neon_site.run_case(base_case_root, run_type, run_length, overwrite, setup_only, no_batch, rerun) - -if __name__ == "__main__": - main(__doc__) - - + base_case_root = neon_site.build_base_case( + cesmroot, output_root, res, compset, overwrite, setup_only + ) + logger.info("-----------------------------------") + logger.info("Running CTSM for neon site : {}".format(neon_site.name)) + neon_site.run_case( + base_case_root, + run_type, + run_length, + overwrite, + setup_only, + no_batch, + rerun, + ) + + +if __name__ == "__main__": + main(__doc__) From 4c2276579bb75147204240c1b0990b7b54952c5b Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 27 Jan 2022 02:05:34 -0700 Subject: [PATCH 136/223] addressing more comments. --- python/ctsm/subset_data.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index e59229ac9d..1d6ea502c6 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -167,14 +167,6 @@ def get_parser(): default=None, nargs='*', ) - pt_parser.add_argument( - "--datm-from-tower", - help="Create DATM forcing data at single point for a tower data.", - action="store_true", - dest="datm_tower", - required=False, - default=False, - ) # -- region-specific parser options rg_parser.add_argument( "--lat1", @@ -234,7 +226,8 @@ def get_parser(): for subparser in [pt_parser, rg_parser]: subparser.add_argument( "--create-domain", - help="Create CLM domain file at single point/region.", + help="Create CLM domain file at single point/region. 
\
+            Domain files are not needed for NUOPC cases.",
         action="store_true",
         dest="create_domain",
         required=False,
@@ -255,7 +248,7 @@ def get_parser():
         )
         subparser.add_argument(
             "--create-datm",
-            help="Create DATM forcing data at single point/region.",
+            help="Create DATM forcing data at single point.",
             action="store_true",
             dest="create_datm",
             required=False,

From 13fec89d1b8f5ee2fcd3e406a66f3fa226ef085e Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Thu, 27 Jan 2022 02:20:26 -0700
Subject: [PATCH 137/223] addressing more comments.

---
 tools/site_and_regional/subset_data | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/tools/site_and_regional/subset_data b/tools/site_and_regional/subset_data
index 680de27aff..bb582b21f8 100755
--- a/tools/site_and_regional/subset_data
+++ b/tools/site_and_regional/subset_data
@@ -10,17 +10,21 @@ please check python/ctsm/subset_data.py file.
 This script extracts domain files, surface dataset, and DATM files
 at either a single point or a region using the global dataset.
 
-To see all available options for single-point subsetting:
-    ./subset_data point --help
-To see all available options for region subsetting:
-    ./subset_data region --help
-
 To run this script the following packages are required:
         - numpy
        - xarray
 
----------------------------------------------------------------
 To see all available options for single-point/regional subsetting:
     ./subset_data --help
+
+----------------------------------------------------------------
+Instructions for running on Cheyenne/Casper:
+ load the following into your local environment
+    module load python
+    ncar_pylib
+
+To remove from your environment on Cheyenne/Casper:
+    deactivate
 """
 
 import os

From 8d35b1091e67ac9df3da02b5466a83f000a9091c Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Thu, 27 Jan 2022 02:41:27 -0700
Subject: [PATCH 138/223] update readme.

---
 tools/site_and_regional/README | 24 ++++++++++++++++++++----
 1 file changed, 20 insertions(+), 4 deletions(-)

diff --git a/tools/site_and_regional/README b/tools/site_and_regional/README
index 7ab81b370e..957e26b9a2 100644
--- a/tools/site_and_regional/README
+++ b/tools/site_and_regional/README
@@ -11,13 +11,29 @@ ncar_pylib
 
 Brief description of scripts:
 
 subset_data
-    create regional domain, surface data, and rtm directional files by
-    extracting data from global datasets
+    This script extracts domain files, surface dataset, and DATM files
+    at either a single point or a region using the global dataset.
+    For extracting domain files, surface dataset, and DATM files at a single point, use:
+    ./subset_data point
+
+    For extracting domain files, surface dataset, and DATM files at a region, use:
+    ./subset_data region
 
 modify_singlept_site_neon.py
-    After running subset_data.py overwrite some fields with site-specific
-    data for neon sites
+    After running subset_data.py overwrite some fields with site-specific
+    data for neon sites. This code uses neon_sites_dompft.csv to determine dom-pft values.
+
+run_neon.py
+    Wrapper script for running CTSM simulations for one or more
+    neon sites for spin-up or transient run types.
+
+neon_surf_wrapper.py
+    Wrapper script that runs subset_data to extract data for all neon points and then
+    uses modify_singlept_site_neon.py to update site-specific fields.
+neon_s3_upload + Script to rename and upload NEON site finidat files to NEON s3 bucket + for use in transient startup cases DEPRECATED SCRIPTS: From b4660c38e3332946d8141ababe7d4869136984b6 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 27 Jan 2022 04:03:54 -0700 Subject: [PATCH 139/223] pylint issues --- python/ctsm/test/test_unit_singlept_data.py | 367 ++++++++++---------- 1 file changed, 187 insertions(+), 180 deletions(-) diff --git a/python/ctsm/test/test_unit_singlept_data.py b/python/ctsm/test/test_unit_singlept_data.py index 1d9038817d..6f1d14ffa9 100644 --- a/python/ctsm/test/test_unit_singlept_data.py +++ b/python/ctsm/test/test_unit_singlept_data.py @@ -13,10 +13,11 @@ # -- add python/ctsm to path (needed if we want to run the test stand-alone) _CTSM_PYTHON = os.path.join( - os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) + os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir +) sys.path.insert(1, _CTSM_PYTHON) -#pylint: disable=wrong-import-position +# pylint: disable=wrong-import-position from ctsm import unit_testing from ctsm.site_and_regional.single_point_case import SinglePointCase @@ -30,70 +31,69 @@ class TestSinglePointCase(unittest.TestCase): plat = 20.1 plon = 50.5 - site_name=None - create_domain=True - create_surfdata=True - create_landuse=True - create_datm=True - create_user_mods=True + site_name = None + create_domain = True + create_surfdata = True + create_landuse = True + create_datm = True + create_user_mods = True dom_pft = [8] pct_pft = None num_pft = 16 include_nonveg = False uni_snow = True cap_saturation = True - out_dir=os.getcwd() - + out_dir = os.getcwd() def test_create_tag_noname(self): """ Test create_tag when site_name is NOT given. """ single_point = SinglePointCase( - plat = self.plat, - plon = self.plon, - site_name = self.site_name, - create_domain = self.create_domain, - create_surfdata = self.create_surfdata, - create_landuse = self.create_landuse, - create_datm = self.create_datm, - create_user_mods = self.create_user_mods, - dom_pft = self.dom_pft, - pct_pft = self.pct_pft, - num_pft = self.num_pft, - include_nonveg = self.include_nonveg, - uni_snow = self.uni_snow, - cap_saturation = self.cap_saturation, - out_dir = self.out_dir, + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, ) single_point.create_tag() - self.assertEqual(single_point.tag,"50.5_20.1" ) + self.assertEqual(single_point.tag, "50.5_20.1") def test_create_tag_name(self): """ Test create_tag when site_name is given. 
""" single_point = SinglePointCase( - plat = self.plat, - plon = self.plon, - site_name = self.site_name, - create_domain = self.create_domain, - create_surfdata = self.create_surfdata, - create_landuse = self.create_landuse, - create_datm = self.create_datm, - create_user_mods = self.create_user_mods, - dom_pft = self.dom_pft, - pct_pft = self.pct_pft, - num_pft = self.num_pft, - include_nonveg = self.include_nonveg, - uni_snow = self.uni_snow, - cap_saturation = self.cap_saturation, - out_dir = self.out_dir, + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, ) single_point.site_name = "foo" single_point.create_tag() - self.assertEqual(single_point.tag,"foo" ) + self.assertEqual(single_point.tag, "foo") def test_check_dom_pft_too_big(self): """ @@ -101,106 +101,109 @@ def test_check_dom_pft_too_big(self): When one of the given dom_pft(s) are bigger than 78 """ single_point = SinglePointCase( - plat = self.plat, - plon = self.plon, - site_name = self.site_name, - create_domain = self.create_domain, - create_surfdata = self.create_surfdata, - create_landuse = self.create_landuse, - create_datm = self.create_datm, - create_user_mods = self.create_user_mods, - dom_pft = self.dom_pft, - pct_pft = self.pct_pft, - num_pft = self.num_pft, - include_nonveg = self.include_nonveg, - uni_snow = self.uni_snow, - cap_saturation = self.cap_saturation, - out_dir = self.out_dir, + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, ) - single_point.dom_pft = [16,36,79] - with self.assertRaisesRegex( argparse.ArgumentTypeError, "values for --dompft should*"): + single_point.dom_pft = [16, 36, 79] + with self.assertRaisesRegex( + argparse.ArgumentTypeError, "values for --dompft should*" + ): single_point.check_dom_pft() - def test_check_dom_pft_too_small(self): """ Test check_dom_pft When one of the given dom_pft(s) are bigger than 1 """ single_point = SinglePointCase( - plat = self.plat, - plon = self.plon, - site_name = self.site_name, - create_domain = self.create_domain, - create_surfdata = self.create_surfdata, - create_landuse = self.create_landuse, - create_datm = self.create_datm, - create_user_mods = self.create_user_mods, - dom_pft = self.dom_pft, - pct_pft = self.pct_pft, - num_pft = self.num_pft, - include_nonveg = self.include_nonveg, - uni_snow = self.uni_snow, - cap_saturation = self.cap_saturation, - out_dir = self.out_dir, + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + 
cap_saturation=self.cap_saturation, + out_dir=self.out_dir, ) - single_point.dom_pft = [16,36,0] - with self.assertRaisesRegex( argparse.ArgumentTypeError, "values for --dompft should*"): + single_point.dom_pft = [16, 36, 0] + with self.assertRaisesRegex( + argparse.ArgumentTypeError, "values for --dompft should*" + ): single_point.check_dom_pft() - def test_check_dom_pft_numpft(self): """ Test check_dom_pft When dom_pft < 16 but no crop (aka num_pft <16) """ single_point = SinglePointCase( - plat = self.plat, - plon = self.plon, - site_name = self.site_name, - create_domain = self.create_domain, - create_surfdata = self.create_surfdata, - create_landuse = self.create_landuse, - create_datm = self.create_datm, - create_user_mods = self.create_user_mods, - dom_pft = self.dom_pft, - pct_pft = self.pct_pft, - num_pft = self.num_pft, - include_nonveg = self.include_nonveg, - uni_snow = self.uni_snow, - cap_saturation = self.cap_saturation, - out_dir = self.out_dir, + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, ) single_point.dom_pft = [16, 53] single_point.num_pft = 16 - with self.assertRaisesRegex( argparse.ArgumentTypeError, "Please use --crop*"): + with self.assertRaisesRegex(argparse.ArgumentTypeError, "Please use --crop*"): single_point.check_dom_pft() - def test_check_dom_pft_mixed_range(self): """ Test check_dom_pft Test if all dom_pft(s) are in the same range of either 1-15 or 16-78 """ single_point = SinglePointCase( - plat = self.plat, - plon = self.plon, - site_name = self.site_name, - create_domain = self.create_domain, - create_surfdata = self.create_surfdata, - create_landuse = self.create_landuse, - create_datm = self.create_datm, - create_user_mods = self.create_user_mods, - dom_pft = self.dom_pft, - pct_pft = self.pct_pft, - num_pft = self.num_pft, - include_nonveg = self.include_nonveg, - uni_snow = self.uni_snow, - cap_saturation = self.cap_saturation, - out_dir = self.out_dir, + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, ) - single_point.dom_pft = [1,5,16] - with self.assertRaisesRegex( argparse.ArgumentTypeError, "mixed land units is not possible*"): + single_point.dom_pft = [1, 5, 16] + with self.assertRaisesRegex( + argparse.ArgumentTypeError, "mixed land units is not possible*" + ): single_point.check_dom_pft() def test_check_nonveg_nodompft(self): @@ -209,25 +212,28 @@ def test_check_nonveg_nodompft(self): If include_nonveg =False and no dompft it should complain. 
""" single_point = SinglePointCase( - plat = self.plat, - plon = self.plon, - site_name = self.site_name, - create_domain = self.create_domain, - create_surfdata = self.create_surfdata, - create_landuse = self.create_landuse, - create_datm = self.create_datm, - create_user_mods = self.create_user_mods, - dom_pft = self.dom_pft, - pct_pft = self.pct_pft, - num_pft = self.num_pft, - include_nonveg = self.include_nonveg, - uni_snow = self.uni_snow, - cap_saturation = self.cap_saturation, - out_dir = self.out_dir, + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, ) single_point.dom_pft = None single_point.include_nonveg = False - with self.assertRaisesRegex( argparse.ArgumentTypeError, "To include non-veg land units, you need to specify*"): + with self.assertRaisesRegex( + argparse.ArgumentTypeError, + "To include non-veg land units, you need to specify*", + ): single_point.check_nonveg() def test_check_pct_pft_notsamenumbers(self): @@ -236,83 +242,84 @@ def test_check_pct_pft_notsamenumbers(self): Check if pct_pft is the same length as dom_pft """ single_point = SinglePointCase( - plat = self.plat, - plon = self.plon, - site_name = self.site_name, - create_domain = self.create_domain, - create_surfdata = self.create_surfdata, - create_landuse = self.create_landuse, - create_datm = self.create_datm, - create_user_mods = self.create_user_mods, - dom_pft = self.dom_pft, - pct_pft = self.pct_pft, - num_pft = self.num_pft, - include_nonveg = self.include_nonveg, - uni_snow = self.uni_snow, - cap_saturation = self.cap_saturation, - out_dir = self.out_dir, + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, ) single_point.dom_pft = [1, 5] single_point.pct_pft = [0.5] - with self.assertRaisesRegex( argparse.ArgumentTypeError, "Please provide the same number of inputs*"): + with self.assertRaisesRegex( + argparse.ArgumentTypeError, "Please provide the same number of inputs*" + ): single_point.check_pct_pft() - def test_check_pct_pft_sum_not1(self): """ Test check_pct_pft Check if pct_pft adds up to 1 or 100. 
""" single_point = SinglePointCase( - plat = self.plat, - plon = self.plon, - site_name = self.site_name, - create_domain = self.create_domain, - create_surfdata = self.create_surfdata, - create_landuse = self.create_landuse, - create_datm = self.create_datm, - create_user_mods = self.create_user_mods, - dom_pft = self.dom_pft, - pct_pft = self.pct_pft, - num_pft = self.num_pft, - include_nonveg = self.include_nonveg, - uni_snow = self.uni_snow, - cap_saturation = self.cap_saturation, - out_dir = self.out_dir, + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, ) single_point.dom_pft = [1, 5] - single_point.pct_pft = [0.1,0.5] - with self.assertRaisesRegex( argparse.ArgumentTypeError, "Sum of --pctpft values should be equal to 1*"): + single_point.pct_pft = [0.1, 0.5] + with self.assertRaisesRegex( + argparse.ArgumentTypeError, "Sum of --pctpft values should be equal to 1*" + ): single_point.check_pct_pft() - def test_check_pct_pft_fraction_topct(self): """ Test check_pct_pft Check if pct_pft is corretly converted to percent. """ single_point = SinglePointCase( - plat = self.plat, - plon = self.plon, - site_name = self.site_name, - create_domain = self.create_domain, - create_surfdata = self.create_surfdata, - create_landuse = self.create_landuse, - create_datm = self.create_datm, - create_user_mods = self.create_user_mods, - dom_pft = self.dom_pft, - pct_pft = self.pct_pft, - num_pft = self.num_pft, - include_nonveg = self.include_nonveg, - uni_snow = self.uni_snow, - cap_saturation = self.cap_saturation, - out_dir = self.out_dir, + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, ) single_point.dom_pft = [1, 5, 8] single_point.pct_pft = [0.5, 0.4, 0.1] single_point.check_pct_pft() - self.assertEqual(single_point.pct_pft,[50,40,10] ) - + self.assertEqual(single_point.pct_pft, [50, 40, 10]) if __name__ == "__main__": From 0c35f06c61515996dbe743f705d6a55d3a62baf5 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 27 Jan 2022 04:04:28 -0700 Subject: [PATCH 140/223] correct initialization and fix pylint. --- .../ctsm/site_and_regional/single_point_case.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 55ae85a581..36ac65a531 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -367,6 +367,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): """ Create surface data file at a single point. 
""" + # pylint: disable=too-many-statements logger.info("----------------------------------------------------------------------") logger.info( "Creating surface dataset file at %s, %s", self.plon.__str__(), self.plat.__str__()) @@ -388,13 +389,17 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): #-- modify surface data properties if self.dom_pft is not None: + max_dom_pft = max(self.dom_pft) #-- First initialize everything: - #f_out["PCT_NAT_PFT"][:, :, 0] = 100 - ##f_out["PCT_NATVEG"][:, :] = 0 + if max_dom_pft <=NAT_PFT : + f_out ["PCT_NAT_PFT"][:,:,:] = 0 + else: + f_out["PCT_CFT"][:,:,:] = 0 - #f_out["PCT_CFT"][:, :, 0] = 100 - ##f_out["PCT_CROP"][:, :] = 0 - f_out ["PCT_NAT_PFT"] = 0 + # Do we need to initialize these here? + # Because we set them in include_nonveg + #f_out["PCT_NATVEG"][:, :] = 0 + #f_out["PCT_CROP"][:, :] = 0 #-- loop over all dom_pft and pct_pft zip_pfts = zip (self.dom_pft, self.pct_pft) @@ -408,6 +413,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): # ------------------------------- # By default include_nonveg=False # When we use --include-nonveg we turn it to True + # Therefore by default we are hitting the following if: if not self.include_nonveg: logger.info ("Zeroing out non-vegetation land units in the surface data.") From 935d72cac2ab17f1e773bb76a15e2cca6708e940 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 27 Jan 2022 05:01:20 -0700 Subject: [PATCH 141/223] update neon script to reflect the recent changes. --- tools/site_and_regional/neon_surf_wrapper.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/site_and_regional/neon_surf_wrapper.py b/tools/site_and_regional/neon_surf_wrapper.py index e5b37a74e3..3c58b1cd51 100755 --- a/tools/site_and_regional/neon_surf_wrapper.py +++ b/tools/site_and_regional/neon_surf_wrapper.py @@ -97,7 +97,8 @@ def main(): site = row['Site'] pft = row['pft'] print ("Now processing site :", site) - command = ['./subset_data.py','point','--lat',str(lat),'--lon',str(lon),'--site',site,'--dompft',str(pft),'--crop'] + command = ['./subset_data','point','--lat',str(lat),'--lon',str(lon),'--site',site,'--dompft',str(pft),'--crop', + '--create-surface','--uniform-snowpack','--cap-saturation','--verbose'] execute(command) command = ['./modify_singlept_site_neon.py','--neon_site',site] From 4a5469a8a8951c4c26e70d253483953d290353d9 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 27 Jan 2022 09:49:45 -0700 Subject: [PATCH 142/223] update utils.py try block to move response outside --- python/ctsm/utils.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index db607b35d3..8b1373de3d 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -144,21 +144,21 @@ def download_file(url, fname): try: response = requests.get(url) - with open(fname, "wb") as this_f: - this_f.write(response.content) - - # -- Check if download status_code - if response.status_code == 200: - logger.info("Download finished successfully for : %s", fname) - - elif response.status_code == 404: - logger.warning ('This file is not available on the server: %s', fname) - err_msg = "Couldn't download file "+fname +"-- Error code: "+ "404" - abort(err_msg) - # pylint: disable=broad-except except Exception as err: logger.warning ('The server could not fulfill the request.') logger.warning ('Something went wrong in downloading: %s', fname) err_msg = "Couldn't download file "+fname +"-- Error code:"+ err 
abort(err_msg) + + with open(fname, "wb") as this_f: + this_f.write(response.content) + + # -- Check if download status_code + if response.status_code == 200: + logger.info("Download finished successfully for : %s", fname) + + elif response.status_code == 404: + logger.warning ('This file is not available on the server: %s', fname) + err_msg = "Couldn't download file "+fname +"-- Error code: "+ "404" + abort(err_msg) From 3ac329f92332fe7dfbd00818a4c715dc5c2f9f48 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 27 Jan 2022 10:00:16 -0700 Subject: [PATCH 143/223] ChangeLog/ChangeSum drafts --- doc/ChangeLog | 84 +++++++++++++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 2 files changed, 85 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index dba2842cc1..901725d21c 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,88 @@ =============================================================== +Tag name: ctsm5.1.dev073 +Originator(s): slevis (Samuel Levis,SLevis Consulting,303-665-1310) +Date: Thu Jan 27 09:52:00 MST 2022 +One-line Summary: Replace dom_nat_pft with dom_plant to enable crop in fsurdat_modifier tool + +Purpose and description of changes +---------------------------------- + + Allow user to replace vegetation in fsurdat files with any pft/cft using the + fsurdat_modifier tool option dom_plant. This option replaces now obsolete + option dom_nat_pft which handled pfts only and not cfts. + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Notes of particular relevance for users +--------------------------------------- +Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables): + Instead of dom_nat_pft = UNSET, modify_template.cfg now includes the line + dom_plant = UNSET to allow users to set the pft/cft of their choice to replace + the existing vegetation. + +Changes to the datasets (e.g., parameter, surface or initial files): + New system test that checks the new code compares a generated file to a + baseline file. I added the baseline file to this PR: + .../python/ctsm/test/testinputs/surfdata_5x5_amazon_16pfts_Irrig_CMIP6_simyr2000_c171214_modified_with_crop.nc + + +Notes of particular relevance for developers: +--------------------------------------------- +Changes to tests or testing: + Added a system test to test_sys_fsurdat_modifier.py to run with the new option + dom_plant set to 15 (i.e. a crop). + + +Testing summary: +---------------- + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + (any machine) - cheyenne PASS + + [If python code has changed and you are NOT running aux_clm (e.g., because the only changes are in python + code) then also run the clm_pymods test suite; this is a small subset of aux_clm that runs the system + tests impacted by python changes. 
The best way to do this, if you expect no changes from the last tag in + either model output or namelists, is: create sym links pointing to the last tag's baseline directory, + named with the upcoming tag; then run the clm_pymods test suite comparing against these baselines but NOT + doing their own baseline generation. If you are already running the full aux_clm then you do NOT need to + separately run the clm_pymods test suite, and you can remove the following line.] + + clm_pymods test suite on cheyenne - PASS + + any other testing (give details below): + + +Answer changes +-------------- +Changes answers relative to baseline: NO + + +Other details +------------- +Pull Requests that document the changes (include PR ids): + https://github.com/ESCOMP/ctsm/pull/1615 + +=============================================================== +=============================================================== Tag name: ctsm5.1.dev072 Originator(s): negins (Negin Sobhani,UCAR/TSS,303-497-1224) Date: Mon Jan 17 10:50:25 MST 2022 diff --git a/doc/ChangeSum b/doc/ChangeSum index 9392ae82b0..cabc3a3e81 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.1.dev073 slevis 01/26/2022 Replace dom_nat_pft with dom_plant to enable crop in fsurdat_modifier tool ctsm5.1.dev072 negins 01/17/2022 mksurfdat toolchain part 1: gen_mksurf_namelist ctsm5.1.dev071 glemieux 01/16/2022 Small changes to enable new fates dimension and update fates tag ctsm5.1.dev070 multiple 01/10/2022 Update externals, remove need for LND_DOMAIN_FILE and LND_DOMAIN_PATH, etc. From 500e973e3b369fec8fe2d09812390bd70270ddab Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 27 Jan 2022 12:36:19 -0700 Subject: [PATCH 144/223] update dom_pft-1 handling which was done by mistake. --- .../site_and_regional/single_point_case.py | 32 ++++++++++--------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 36ac65a531..f3aad5d55f 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -144,9 +144,9 @@ def check_dom_pft (self): same range. e.g. If users specified multiple dom_pft, they should be either in : - - 1-15 range + - 1-14 range or - - 16-78 range + - 15-78 range - give an error : mixed land units not possible. dom_pft in netcdf: 1-15 which tranlate to 0-14 @@ -158,7 +158,7 @@ def check_dom_pft (self): If any dom_pft is less than 1. Error (ArgumentTypeError): If mixed land units are chosen. - dom_pft values are both in range of 1-15 and 16-78. + dom_pft values are both in range of 1-14 and 15-78. """ @@ -176,19 +176,21 @@ def check_dom_pft (self): raise argparse.ArgumentTypeError(err_msg) #-- check dom_pft vs num_pft - if max_dom_pft > self.num_pft : - err_msg = "Please use --crop flag when --dompft is above 16." + print (max_dom_pft) + print (self.num_pft) + if self.num_pft -1 < max_dom_pft < MAX_PFT : + err_msg = "Please use --crop flag when --dompft is above 15." raise argparse.ArgumentTypeError(err_msg) #-- check if all dom_pft are in the same range: - if min_dom_pft <= NAT_PFT < max_dom_pft: + if min_dom_pft < NAT_PFT <= max_dom_pft: err_msg = """ \n Subsetting using mixed land units is not possible. 
Please make sure all --dompft values are in only one of these ranges: - - 1-15 - - 16-78 + - 1-14 + - 15-78 """ raise argparse.ArgumentTypeError(err_msg) @@ -236,7 +238,7 @@ def check_pct_pft (self): For example, --dompft 8 --pctpft 0.4 0.6 should give an error. - Check if the sum of pct_pft is equal to 100% or 1. - For example, --dompft 8 15 --pctpft 0.6 0.9 should give an error. + For example, --dompft 8 14 --pctpft 0.6 0.9 should give an error. - If the sum of pct_pft is 1, convert it to % (multiply by 100) @@ -391,7 +393,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): if self.dom_pft is not None: max_dom_pft = max(self.dom_pft) #-- First initialize everything: - if max_dom_pft <=NAT_PFT : + if max_dom_pft < NAT_PFT : f_out ["PCT_NAT_PFT"][:,:,:] = 0 else: f_out["PCT_CFT"][:,:,:] = 0 @@ -404,11 +406,11 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): #-- loop over all dom_pft and pct_pft zip_pfts = zip (self.dom_pft, self.pct_pft) for dom_pft, pct_pft in zip_pfts: - if dom_pft <= NAT_PFT: - f_out['PCT_NAT_PFT'][:, :, dom_pft-1] = pct_pft - elif dom_pft > NAT_PFT: + if dom_pft < NAT_PFT: + f_out['PCT_NAT_PFT'][:, :, dom_pft] = pct_pft + else: dom_pft = dom_pft-NAT_PFT - f_out['PCT_CFT'][:, :, dom_pft-1] = pct_pft + f_out['PCT_CFT'][:, :, dom_pft] = pct_pft # ------------------------------- # By default include_nonveg=False @@ -423,7 +425,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): f_out["PCT_GLACIER"][:, :] = 0.0 max_dom_pft = max(self.dom_pft) - if max_dom_pft <=NAT_PFT : + if max_dom_pft < NAT_PFT : f_out["PCT_NATVEG"][:, :] = 100 f_out["PCT_CROP"][:, :] = 0 else: From 138686b319d14f300960d7162a5698b951a60e7e Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 27 Jan 2022 12:36:53 -0700 Subject: [PATCH 145/223] update my unit tests. --- python/ctsm/test/test_unit_singlept_data.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/python/ctsm/test/test_unit_singlept_data.py b/python/ctsm/test/test_unit_singlept_data.py index 6f1d14ffa9..46b1e19e37 100644 --- a/python/ctsm/test/test_unit_singlept_data.py +++ b/python/ctsm/test/test_unit_singlept_data.py @@ -154,7 +154,7 @@ def test_check_dom_pft_too_small(self): def test_check_dom_pft_numpft(self): """ Test check_dom_pft - When dom_pft < 16 but no crop (aka num_pft <16) + When dom_pft > 15 but no crop (aka num_pft <15) """ single_point = SinglePointCase( plat=self.plat, @@ -173,7 +173,7 @@ def test_check_dom_pft_numpft(self): cap_saturation=self.cap_saturation, out_dir=self.out_dir, ) - single_point.dom_pft = [16, 53] + single_point.dom_pft = [15, 53] single_point.num_pft = 16 with self.assertRaisesRegex(argparse.ArgumentTypeError, "Please use --crop*"): single_point.check_dom_pft() @@ -201,6 +201,7 @@ def test_check_dom_pft_mixed_range(self): out_dir=self.out_dir, ) single_point.dom_pft = [1, 5, 16] + single_point.num_pft = 78 with self.assertRaisesRegex( argparse.ArgumentTypeError, "mixed land units is not possible*" ): From f6fe40e30689497b64e5ce46f8ad0d2f46c892de Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 27 Jan 2022 12:48:52 -0700 Subject: [PATCH 146/223] move download_file to download_utils so that lilac test pass. 
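
The point of the split: ctsm/utils.py is imported by nearly every CTSM
script, including the LILAC ones, but only download_file needs the
third-party `requests` package, so that helper moves into its own module
(the following patch adds it) and utils.py drops the `requests` import.
Presumably that stray dependency is what tripped the LILAC tests. A
minimal standalone sketch of the isolation idea -- hypothetical names,
not the CTSM code, and it uses a deferred import where the CTSM fix uses
a separate module:

    import importlib.util

    def can_import(module_name):
        # Probe availability without importing, so this sketch also runs
        # in an environment that lacks the optional dependency.
        return importlib.util.find_spec(module_name) is not None

    # Base helpers depend only on the standard library ...
    def abort(errmsg):
        raise SystemExit(errmsg)

    # ... while the one network helper loads `requests` lazily, so merely
    # importing this module never requires it. (Hypothetical names; the
    # CTSM fix isolates the import in a dedicated module instead.)
    def download(url, fname):
        if not can_import("requests"):
            abort("requests is not installed; downloads are unavailable")
        import requests

        response = requests.get(url)
        with open(fname, "wb") as this_f:
            this_f.write(response.content)

    if __name__ == "__main__":
        print("requests available:", can_import("requests"))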
--- python/ctsm/utils.py | 39 ------------------- .../modify_singlept_site_neon.py | 7 ++-- tools/site_and_regional/run_neon.py | 2 +- 3 files changed, 5 insertions(+), 43 deletions(-) diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 8b1373de3d..70bef1d951 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -9,8 +9,6 @@ from datetime import date from getpass import getuser -import requests - logger = logging.getLogger(__name__) def abort(errmsg): @@ -125,40 +123,3 @@ def update_metadata(file, title, summary, contact, data_script, description): for attr in del_attrs: if attr in attr_list: del file.attrs[attr] - -def download_file(url, fname): - """ - Function to download a file. - Args: - url (str): - url of the file for downloading - fname (str) : - file name to save the downloaded file. - - Raises: - Error : - When the file is not available on the server (status_code:404) - Error: - When download fails for any reason. - """ - try: - response = requests.get(url) - - # pylint: disable=broad-except - except Exception as err: - logger.warning ('The server could not fulfill the request.') - logger.warning ('Something went wrong in downloading: %s', fname) - err_msg = "Couldn't download file "+fname +"-- Error code:"+ err - abort(err_msg) - - with open(fname, "wb") as this_f: - this_f.write(response.content) - - # -- Check if download status_code - if response.status_code == 200: - logger.info("Download finished successfully for : %s", fname) - - elif response.status_code == 404: - logger.warning ('This file is not available on the server: %s', fname) - err_msg = "Couldn't download file "+fname +"-- Error code: "+ "404" - abort(err_msg) diff --git a/tools/site_and_regional/modify_singlept_site_neon.py b/tools/site_and_regional/modify_singlept_site_neon.py index 1024da4a6b..924bb7cccc 100755 --- a/tools/site_and_regional/modify_singlept_site_neon.py +++ b/tools/site_and_regional/modify_singlept_site_neon.py @@ -251,9 +251,9 @@ def find_surffile(surf_dir, site_name): """ # sf_name = "surfdata_hist_16pfts_Irrig_CMIP6_simyr2000_"+site_name+"*.nc" - sf_name = "surfdata_hist_78pfts_CMIP6_simyr2000_" + site_name + "*.nc" - # surf_file = glob.glob(os.path.join(surf_dir,sf_name)) - surf_file = sorted(glob.glob(surf_dir + "/" + sf_name)) + sf_name = "surfdata_*_hist_78pfts_CMIP6_simyr2000_" + site_name + "*.nc" + print (os.path.join(surf_dir , sf_name)) + surf_file = sorted(glob.glob(os.path.join(surf_dir , sf_name))) if len(surf_file) > 1: print("The following files found :", *surf_file, sep="\n- ") @@ -680,6 +680,7 @@ def main(): print("Updated : ", f2.PCT_CROP.values) print("Updating PCT_NAT_PFT") + #print (f2.PCT_NAT_PFT) print(f2.PCT_NAT_PFT.values[0]) f2.PCT_NAT_PFT.values[0] = [[100.0]] print(f2.PCT_NAT_PFT[0].values) diff --git a/tools/site_and_regional/run_neon.py b/tools/site_and_regional/run_neon.py index 378ea6d426..101948b02c 100755 --- a/tools/site_and_regional/run_neon.py +++ b/tools/site_and_regional/run_neon.py @@ -78,7 +78,7 @@ from ctsm import add_cime_to_path from ctsm.path_utils import path_to_ctsm_root -from ctsm.utils import download_file +from ctsm.download_utils import download_file import CIME.build as build from standard_script_setup import * From 2b30cb1f93b987f46a7b9540092bc9a2d0f0b9e3 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Fri, 28 Jan 2022 11:21:49 -0700 Subject: [PATCH 147/223] adding download_utils. 
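
One way to exercise the new helper without touching the network is to
stub requests.get. A rough sketch, not part of the CTSM test suite: it
assumes `requests` is installed, that python/ is on sys.path so
ctsm.download_utils imports, and the fake response carries only the two
fields download_file reads:

    import os
    import tempfile
    import unittest
    from unittest import mock

    class FakeResponse:
        # Stand-in for requests.Response; just the attributes used here.
        status_code = 200
        content = b"netcdf-bytes"

    class TestDownloadFile(unittest.TestCase):
        def test_writes_content_on_200(self):
            # Assumes python/ is on sys.path so the ctsm package imports.
            from ctsm.download_utils import download_file

            fname = os.path.join(tempfile.mkdtemp(), "subset.nc")
            with mock.patch("requests.get", return_value=FakeResponse()):
                download_file("https://example.com/subset.nc", fname)
            with open(fname, "rb") as this_f:
                self.assertEqual(this_f.read(), b"netcdf-bytes")

    if __name__ == "__main__":
        unittest.main()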
--- python/ctsm/download_utils.py | 47 +++++++++++++++++++++++++++++++++++
 1 file changed, 47 insertions(+)
 create mode 100644 python/ctsm/download_utils.py

diff --git a/python/ctsm/download_utils.py b/python/ctsm/download_utils.py
new file mode 100644
index 0000000000..c6b5cee3ec
--- /dev/null
+++ b/python/ctsm/download_utils.py
@@ -0,0 +1,47 @@
+"""Utility functions for downloading files."""
+
+import logging
+
+import requests
+
+from ctsm.utils import abort
+
+logger = logging.getLogger(__name__)
+
+
+def download_file(url, fname):
+ """
+ Function to download a file.
+ Args:
+ url (str):
+ url of the file for downloading
+ fname (str) :
+ file name to save the downloaded file.
+
+ Raises:
+ Error :
+ When the file is not available on the server (status_code:404)
+ Error:
+ When download fails for any reason.
+ """
+ try:
+ response = requests.get(url)
+
+ # pylint: disable=broad-except
+ except Exception as err:
+ logger.warning("The server could not fulfill the request.")
+ logger.warning("Something went wrong in downloading: %s", fname)
+ err_msg = "Couldn't download file " + fname + " -- Error: " + str(err)
+ abort(err_msg)
+
+ with open(fname, "wb") as this_f:
+ this_f.write(response.content)
+
+ # -- Check if download status_code
+ if response.status_code == 200:
+ logger.info("Download finished successfully for : %s", fname)
+
+ elif response.status_code == 404:
+ logger.warning("This file is not available on the server: %s", fname)
+ err_msg = "Couldn't download file " + fname + "-- Error code: " + "404"
+ abort(err_msg)

From 1dd54c9fcd2d1856a4045738c8b909827c5fab83 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Fri, 28 Jan 2022 15:30:54 -0700
Subject: [PATCH 148/223] one more comment.

---
 python/ctsm/subset_data.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py
index 1d6ea502c6..db1b9ecec7 100644
--- a/python/ctsm/subset_data.py
+++ b/python/ctsm/subset_data.py
@@ -219,7 +219,6 @@ def get_parser():
 action="store_true",
 dest="create_mesh",
 required=False,
- default=False,
 )

 # -- common options between both subparsers

From 9aafcbb80cb8c96bdcff78c42242600ee441784b Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Fri, 28 Jan 2022 15:35:06 -0700
Subject: [PATCH 149/223] fixing landuse name

---
 python/ctsm/site_and_regional/single_point_case.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py
index f3aad5d55f..824ab82be2 100644
--- a/python/ctsm/site_and_regional/single_point_case.py
+++ b/python/ctsm/site_and_regional/single_point_case.py
@@ -362,7 +362,7 @@ def create_landuse_at_point(self, indir, file, user_mods_dir):
 # write to user_nl_clm data if specified
 if self.create_user_mods:
 with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm:
- line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, fluse_out))
+ line = "flanduse_timeseries = '${}'".format(os.path.join(USRDAT_DIR, fluse_out))
 self.write_to_file(line, nl_clm)

 def create_surfdata_at_point(self, indir, file, user_mods_dir):

From 88c074b7598d88d7116f46621e788eff2d19152e Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Fri, 28 Jan 2022 15:39:36 -0700
Subject: [PATCH 150/223] fixed typos.
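
The corrected docstring encodes a small argument-validation rule. The
gist, as a sketch of the rule rather than the actual check_nonveg
implementation (the error text here is assumed from the unit tests):

    import argparse

    # Sketch only -- not the CTSM method. Non-vegetation land units are
    # zeroed out by default, which only makes sense when a dominant PFT
    # is requested, so a bare `./subset_data point` is rejected.
    def check_nonveg(include_nonveg, dom_pft):
        if not include_nonveg and dom_pft is None:
            raise argparse.ArgumentTypeError(
                "To include non-veg land units, you need to specify "
                "--include-nonveg; to zero them out, provide --dompft."
            )

    check_nonveg(include_nonveg=False, dom_pft=[1])  # point --dompft 1
    check_nonveg(include_nonveg=True, dom_pft=None)  # point --include-nonveg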
--- python/ctsm/site_and_regional/single_point_case.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 824ab82be2..829902e744 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -204,9 +204,9 @@ def check_nonveg (self): when zero out non veg land units (by default true) and not provide a dominant pft: The user can run ./subset_data using: - ./subset_data point --dom-pft + ./subset_data point --dompft ./subset_data point --include-nonveg - ./subset_data point --dom-pft --include-nonveg + ./subset_data point --dompft --include-nonveg But this will raise an error: ./subset_data point From 34325817343a861f994cef6e03f59b2e01e95f90 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Fri, 28 Jan 2022 15:46:00 -0700 Subject: [PATCH 151/223] update README. --- tools/site_and_regional/README | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/site_and_regional/README b/tools/site_and_regional/README index 957e26b9a2..930a07a7ac 100644 --- a/tools/site_and_regional/README +++ b/tools/site_and_regional/README @@ -21,7 +21,7 @@ subset_data modify_singlept_site_neon.py After running subset_data.py overwrite some fields with site-specific - data for neon sites. This code uses neon_sites_dompft.csv to determine dom-pft values. + data for neon sites. run_neon.py Wrapper script for running CTSM simulations for one or more @@ -30,6 +30,7 @@ run_neon.py neon_surf_wrapper.py Wrapper script that run subset_data to extract data for all neon points and then use modify_singlept_site_neon.py to update site-specific fields. + This code uses neon_sites_dompft.csv to determine --dompft (dominant pft types) values. neon_s3_upload Script to rename and upload NEON site finidat files to NEON s3 bucket From 237ae6859b4304b5e235328f5d5fbc1a2627816e Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Fri, 28 Jan 2022 15:48:46 -0700 Subject: [PATCH 152/223] removing --help line. --- python/ctsm/subset_data.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index db1b9ecec7..a89203a8ca 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -535,7 +535,6 @@ def main(): err_msg = textwrap.dedent('''\ \n ------------------------------------ \n Must supply a positional argument: 'point' or 'region'. - \n See ./subset_data --help for more help. ''' ) raise parser.error(err_msg) @@ -545,7 +544,6 @@ def main(): \n ------------------------------------ \n Must supply one of: \n --create-surface \n --create-landuse \n --create-datm \n --create-domain \n - \n See ./subset_data --help for more help. ''' ) raise parser.error(err_msg) From 16db43b9d9042f4f03b0a0d1cc9c70c514db57a5 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Sun, 30 Jan 2022 08:58:40 -0700 Subject: [PATCH 153/223] Introduce local variable in plon_type This avoids changing the type of an existing variable. 
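
The same pattern in miniature (illustrative code, not the CTSM function):
bind the converted value to a new name instead of rebinding the argument,
so each variable keeps one type for its whole lifetime and the function
stays friendly to readers and static type checkers:

    # Illustrative only; the real change is in plon_type below.
    def to_fraction(percent):
        # Convert once; `percent` itself stays whatever the caller passed.
        percent_float = float(percent)
        if not 0.0 <= percent_float <= 100.0:
            raise ValueError("percent should be between 0 and 100")
        return percent_float / 100.0

    assert to_fraction("25") == 0.25  # string input, converted exactly once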
--- python/ctsm/args_utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/ctsm/args_utils.py b/python/ctsm/args_utils.py index 1d8ee8d2fc..d944227157 100644 --- a/python/ctsm/args_utils.py +++ b/python/ctsm/args_utils.py @@ -44,10 +44,10 @@ def plon_type(plon): Returns: plon_out (float): converted longitude between 0 and 360 """ - plon = float(plon) - if plon < -180 or plon > 360: + plon_float = float(plon) + if plon_float < -180 or plon_float > 360: raise argparse.ArgumentTypeError( "ERROR: Longitude should be between 0 and 360 or -180 and 180." ) - plon_out = lon_range_0_to_360(plon) + plon_out = lon_range_0_to_360(plon_float) return plon_out From 9ef390105224a964e990c1bf82163be873b101db Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Sun, 30 Jan 2022 11:56:35 -0700 Subject: [PATCH 154/223] Restore on/off options I think these are needed for consistency with standard config file parsing --- python/ctsm/config_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/ctsm/config_utils.py b/python/ctsm/config_utils.py index 41a023f375..0ece3a180a 100644 --- a/python/ctsm/config_utils.py +++ b/python/ctsm/config_utils.py @@ -156,9 +156,9 @@ def _convert_to_bool(var): Returns: var_out (bool): Boolean value corresponding to the input. """ - if var.lower() in ("yes", "true", "t", "y", "1"): + if var.lower() in ("yes", "true", "t", "y", "1", "on"): var_out = True - elif var.lower() in ("no", "false", "f", "n", "0"): + elif var.lower() in ("no", "false", "f", "n", "0", "off"): var_out = False else: raise ValueError("Boolean value expected. [true or false] or [y or n]") From d09113dfcca24cc3c539a62443f24a338168e615 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Sun, 30 Jan 2022 12:04:03 -0700 Subject: [PATCH 155/223] Fix some documentation --- python/ctsm/git_utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py index 74f5a8af4b..1764103c73 100644 --- a/python/ctsm/git_utils.py +++ b/python/ctsm/git_utils.py @@ -9,7 +9,7 @@ def get_ctsm_git_short_hash(): """ - Returns Git short SHA for the currect directory. + Returns Git short SHA for the CTSM repository. Args: @@ -30,7 +30,7 @@ def get_ctsm_git_short_hash(): def get_ctsm_git_long_hash(): """ - Returns Git long SHA for the currect directory. + Returns Git long SHA for the CTSM repository. 
Args: @@ -49,14 +49,14 @@ def get_ctsm_git_long_hash(): def get_ctsm_git_describe(): """ - Function for giving the recent tag of the git repo + Function for giving the recent tag of the CTSM repository Args: Raises: Returns: - label (str) : ouput of running 'git describe' in shell + label (str) : ouput of running 'git describe' for the CTSM repository """ label = ( subprocess.check_output(["git", "-C", path_to_ctsm_root(), "describe"]) From 23373639ee97ba6e34082769f8a96326e43d4902 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 1 Feb 2022 11:36:55 -0700 Subject: [PATCH 156/223] Update to current externals in cesm2_3_alpha08b --- Externals.cfg | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 1c2293d02f..5e82db0558 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -8,7 +8,7 @@ required = True local_path = components/cism protocol = git repo_url = https://github.com/ESCOMP/CISM-wrapper -tag = cismwrap_2_1_93 +tag = cismwrap_2_1_95 externals = Externals_CISM.cfg required = True @@ -33,22 +33,29 @@ repo_url = https://github.com/nmizukami/mizuRoute hash = 34723c2 required = True +[ccs_config] +tag = ccs_config_cesm0.0.5 +protocol = git +repo_url = https://github.com/ESMCI/ccs_config_cesm.git +local_path = ccs_config +required = True + [cime] local_path = cime protocol = git repo_url = https://github.com/ESMCI/cime -tag = branch_tags/cime6.0.12_a01 +tag = cime6.0.13 required = True [cmeps] -tag = cmeps0.13.43 +tag = cmeps0.13.47 protocol = git repo_url = https://github.com/ESCOMP/CMEPS.git local_path = components/cmeps required = True [cdeps] -tag = cdeps0.12.34 +tag = cdeps0.12.37 protocol = git repo_url = https://github.com/ESCOMP/CDEPS.git local_path = components/cdeps @@ -56,7 +63,7 @@ externals = Externals_CDEPS.cfg required = True [cpl7] -tag = cpl7.0.7 +tag = cpl7.0.11 protocol = git repo_url = https://github.com/ESCOMP/CESM_CPL7andDataComps local_path = components/cpl7 @@ -77,7 +84,7 @@ local_path = libraries/mct required = True [parallelio] -tag = pio2_5_4 +tag = pio2_5_5 protocol = git repo_url = https://github.com/NCAR/ParallelIO local_path = libraries/parallelio From 0044189b1caabfae726551ab70254bff4a91d884 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Tue, 1 Feb 2022 13:59:55 -0700 Subject: [PATCH 157/223] Add get_average_days_per_year function For now, use hard-coded parameters (see https://github.com/ESCOMP/CTSM/issues/1624). --- src/utils/clm_time_manager.F90 | 61 ++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/src/utils/clm_time_manager.F90 b/src/utils/clm_time_manager.F90 index 78c49a75eb..f598dd65d3 100644 --- a/src/utils/clm_time_manager.F90 +++ b/src/utils/clm_time_manager.F90 @@ -37,6 +37,7 @@ module clm_time_manager get_curr_calday, &! return calendar day at end of current timestep get_calday, &! return calendar day from input date get_calendar, &! return calendar + get_average_days_per_year,&! return the average number of days per year for the given calendar get_curr_days_per_year, &! return the days per year for year as of the end of the current time step get_prev_days_per_year, &! return the days per year for year as of the beginning of the current time step get_curr_yearfrac, &! 
return the fractional position in the current year, as of the end of the current timestep @@ -122,6 +123,9 @@ module clm_time_manager private :: TimeGetymd private :: check_timemgr_initialized + character(len=*), parameter, private :: sourcefile = & + __FILE__ + !========================================================================================= contains !========================================================================================= @@ -1229,6 +1233,63 @@ end function get_calendar !========================================================================================= + real(r8) function get_average_days_per_year() + + !--------------------------------------------------------------------------------- + ! Get the average number of days per year for the given calendar. + ! + ! This should be used, for example, when converting a parameter from units of + ! per-year to units of per-second (so that the parameter will have a fixed, constant + ! value rather than a slightly different value on leap years vs. non-leap years). + + real(r8) :: avg_days_per_year + real(r8) :: curr_days_per_year + + real(r8), parameter :: days_per_year_noleap = 365._r8 + + ! From the definition of ESMF_CALKIND_GREGORIAN in + ! https://earthsystemmodeling.org/docs/release/latest/ESMF_refdoc/node6.html: "In the + ! Gregorian calendar every fourth year is a leap year in which February has 29 and not + ! 28 days; however, years divisible by 100 are not leap years unless they are also + ! divisible by 400." This results in an average number of days per year of 365.2425. + real(r8), parameter :: days_per_year_gregorian = 365.2425_r8 + + character(len=*), parameter :: subname = 'get_average_days_per_year' + !--------------------------------------------------------------------------------- + + ! BUG(wjs, 2022-02-01, ESCOMP/CTSM#1624) Ideally we would use ESMF_CalendarGet here, + ! but that currently isn't possible (see notes in issue 1624 for details) + if (to_upper(calendar) == NO_LEAP_C) then + avg_days_per_year = days_per_year_noleap + else if (to_upper(calendar) == GREGORIAN_C) then + avg_days_per_year = days_per_year_gregorian + else + call shr_sys_abort(subname//' ERROR: unrecognized calendar specified= '//trim(calendar)) + end if + + ! Paranoia: Since we're using a hard-coded value, let's make sure that the user hasn't + ! done some customizations to the calendar that change the days per year from what we + ! expect: Compare the hard-coded value with the number of days per year in the + ! current year, which comes from the actual ESMF calendar; the two should be close. + ! (This check can be removed once we address issue 1624, making the results of this + ! function depend on the actual ESMF calendar instead of a hard-coded value.) + curr_days_per_year = get_curr_days_per_year() + if (abs(avg_days_per_year - curr_days_per_year) > 1._r8) then + write(iulog,*) 'ERROR: hard-coded average days per year differs by more than expected' + write(iulog,*) 'from current days per year. Are you using a non-standard calendar?' 
+ write(iulog,*) 'avg_days_per_year (hard-coded) = ', avg_days_per_year + write(iulog,*) 'curr_days_per_year (from ESMF calendar) = ', curr_days_per_year + write(iulog,*) 'You can fix this by changing the hard-coded parameters in '//subname + write(iulog,*) 'in file: '//sourcefile + call shr_sys_abort(subname//' ERROR: hard-coded average days per year differs by more than expected') + end if + + get_average_days_per_year = avg_days_per_year + + end function get_average_days_per_year + + !========================================================================================= + integer function get_curr_days_per_year( offset ) !--------------------------------------------------------------------------------- From bb99ffeb078ee9370e8a901e14ae636466ede940 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Tue, 1 Feb 2022 14:14:09 -0700 Subject: [PATCH 158/223] Use average days per year when converting parameter units Resolves ESCOMP/CTSM#1612 --- src/biogeochem/CNC14DecayMod.F90 | 4 +-- src/biogeochem/CNGapMortalityMod.F90 | 4 +-- src/biogeochem/CNPhenologyMod.F90 | 28 ++++++++++--------- .../SoilBiogeochemDecompCascadeBGCMod.F90 | 4 +-- 4 files changed, 21 insertions(+), 19 deletions(-) diff --git a/src/biogeochem/CNC14DecayMod.F90 b/src/biogeochem/CNC14DecayMod.F90 index 2fdee93d8f..d929b80c33 100644 --- a/src/biogeochem/CNC14DecayMod.F90 +++ b/src/biogeochem/CNC14DecayMod.F90 @@ -5,7 +5,7 @@ module CNC14DecayMod ! ! !USES: use shr_kind_mod , only : r8 => shr_kind_r8 - use clm_time_manager , only : get_step_size_real, get_curr_days_per_year + use clm_time_manager , only : get_step_size_real, get_average_days_per_year use clm_varpar , only : nlevdecomp, ndecomp_pools use clm_varcon , only : secspday use clm_varctl , only : spinup_state @@ -87,7 +87,7 @@ subroutine C14Decay( bounds, num_soilc, filter_soilc, num_soilp, filter_soilp, & ! set time steps dt = get_step_size_real() - days_per_year = get_curr_days_per_year() + days_per_year = get_average_days_per_year() half_life = 5730._r8 * secspday * days_per_year decay_const = - log(0.5_r8) / half_life diff --git a/src/biogeochem/CNGapMortalityMod.F90 b/src/biogeochem/CNGapMortalityMod.F90 index a41542f3a1..29c5353e5e 100644 --- a/src/biogeochem/CNGapMortalityMod.F90 +++ b/src/biogeochem/CNGapMortalityMod.F90 @@ -88,7 +88,7 @@ subroutine CNGapMortality (bounds, num_soilc, filter_soilc, num_soilp, filter_so ! Gap-phase mortality routine for coupled carbon-nitrogen code (CN) ! ! !USES: - use clm_time_manager , only: get_curr_days_per_year + use clm_time_manager , only: get_average_days_per_year use clm_varpar , only: nlevdecomp_full use clm_varcon , only: secspday use clm_varctl , only: use_cndv, spinup_state @@ -180,7 +180,7 @@ subroutine CNGapMortality (bounds, num_soilc, filter_soilc, num_soilp, filter_so end if - m = am/(get_curr_days_per_year() * secspday) + m = am/(get_average_days_per_year() * secspday) !------------------------------------------------------ ! patch-level gap mortality carbon fluxes diff --git a/src/biogeochem/CNPhenologyMod.F90 b/src/biogeochem/CNPhenologyMod.F90 index 1c12747d72..8b0c046ee9 100644 --- a/src/biogeochem/CNPhenologyMod.F90 +++ b/src/biogeochem/CNPhenologyMod.F90 @@ -589,7 +589,7 @@ subroutine CNEvergreenPhenology (num_soilp, filter_soilp , & ! ! !USES: use clm_varcon , only : secspday - use clm_time_manager , only : get_curr_days_per_year + use clm_time_manager , only : get_average_days_per_year use clm_varctl , only : CN_evergreen_phenology_opt ! ! 
!ARGUMENTS: @@ -602,7 +602,7 @@ subroutine CNEvergreenPhenology (num_soilp, filter_soilp , & type(cnveg_nitrogenflux_type) , intent(inout) :: cnveg_nitrogenflux_inst ! ! !LOCAL VARIABLES: - real(r8):: dayspyr ! Days per year + real(r8):: avg_dayspyr ! Average days per year integer :: p ! indices integer :: fp ! lake filter patch index @@ -677,12 +677,12 @@ subroutine CNEvergreenPhenology (num_soilp, filter_soilp , & lgsf => cnveg_state_inst%lgsf_patch & ! Output: [real(r8) (:) ] long growing season factor [0-1] ) - dayspyr = get_curr_days_per_year() + avg_dayspyr = get_average_days_per_year() do fp = 1,num_soilp p = filter_soilp(fp) if (evergreen(ivt(p)) == 1._r8) then - bglfr(p) = 1._r8/(leaf_long(ivt(p)) * dayspyr * secspday) + bglfr(p) = 1._r8/(leaf_long(ivt(p)) * avg_dayspyr * secspday) bgtr(p) = 0._r8 lgsf(p) = 0._r8 end if @@ -1204,7 +1204,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , & ! per year. ! ! !USES: - use clm_time_manager , only : get_curr_days_per_year + use clm_time_manager , only : get_average_days_per_year use CNSharedParamsMod, only : use_fun use clm_varcon , only : secspday use shr_const_mod , only : SHR_CONST_TKFRZ, SHR_CONST_PI @@ -1227,7 +1227,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , & real(r8),parameter :: secspqtrday = secspday / 4 ! seconds per quarter day integer :: g,c,p ! indices integer :: fp ! lake filter patch index - real(r8):: dayspyr ! days per year + real(r8):: avg_dayspyr ! average days per year real(r8):: crit_onset_gdd ! degree days for onset trigger real(r8):: soilt ! temperature of top soil layer real(r8):: psi ! water stress of top soil layer @@ -1322,8 +1322,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , & deadcrootn_storage_to_xfer => cnveg_nitrogenflux_inst%deadcrootn_storage_to_xfer_patch & ! Output: [real(r8) (:) ] ) - ! set time steps - dayspyr = get_curr_days_per_year() + avg_dayspyr = get_average_days_per_year() ! specify rain threshold for leaf onset rain_threshold = 20._r8 @@ -1572,7 +1571,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , & ! calculate long growing season factor (lgsf) ! only begin to calculate a lgsf greater than 0.0 once the number ! of days active exceeds days/year. - lgsf(p) = max(min(3.0_r8*(days_active(p)-leaf_long(ivt(p))*dayspyr )/dayspyr, 1._r8),0._r8) + lgsf(p) = max(min(3.0_r8*(days_active(p)-leaf_long(ivt(p))*avg_dayspyr )/avg_dayspyr, 1._r8),0._r8) ! RosieF. 5 Nov 2015. Changed this such that the increase in leaf turnover is faster after ! trees enter the 'fake evergreen' state. Otherwise, they have a whole year of ! cheating, with less litterfall than they should have, resulting in very high LAI. @@ -1587,7 +1586,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , & ! calculate the background litterfall rate (bglfr) ! in units 1/s, based on leaf longevity (yrs) and correction for long growing season - bglfr(p) = (1._r8/(leaf_long(ivt(p))*dayspyr*secspday))*lgsf(p) + bglfr(p) = (1._r8/(leaf_long(ivt(p))*avg_dayspyr*secspday))*lgsf(p) end if ! set background transfer rate when active but not in the phenological onset period @@ -1598,7 +1597,7 @@ subroutine CNStressDecidPhenology (num_soilp, filter_soilp , & ! in complete turnover of the storage pools in one year at steady state, ! once lgsf has reached 1.0 (after 730 days active). - bgtr(p) = (1._r8/(dayspyr*secspday))*lgsf(p) + bgtr(p) = (1._r8/(avg_dayspyr*secspday))*lgsf(p) ! 
set carbon fluxes for shifting storage pools to transfer pools @@ -1645,6 +1644,7 @@ subroutine CropPhenology(num_pcropp, filter_pcropp , & ! !USES: use clm_time_manager , only : get_curr_date, get_curr_calday, get_curr_days_per_year, get_rad_step_size + use clm_time_manager , only : get_average_days_per_year use pftconMod , only : ntmp_corn, nswheat, nwwheat, ntmp_soybean use pftconMod , only : nirrig_tmp_corn, nirrig_swheat, nirrig_wwheat, nirrig_tmp_soybean use pftconMod , only : ntrp_corn, nsugarcane, ntrp_soybean, ncotton, nrice @@ -1684,7 +1684,8 @@ subroutine CropPhenology(num_pcropp, filter_pcropp , & integer h ! hemisphere indices integer idpp ! number of days past planting real(r8) :: dtrad ! radiation time step delta t (seconds) - real(r8) dayspyr ! days per year + real(r8) dayspyr ! days per year in this year + real(r8) avg_dayspyr ! average number of days per year real(r8) crmcorn ! comparitive relative maturity for corn real(r8) ndays_on ! number of days to fertilize logical do_plant_normal ! are the normal planting rules defined and satisfied? @@ -1750,6 +1751,7 @@ subroutine CropPhenology(num_pcropp, filter_pcropp , & ! get time info dayspyr = get_curr_days_per_year() + avg_dayspyr = get_average_days_per_year() jday = get_curr_calday() call get_curr_date(kyr, kmo, kda, mcsec) dtrad = real( get_rad_step_size(), r8 ) @@ -2105,7 +2107,7 @@ subroutine CropPhenology(num_pcropp, filter_pcropp , & else if (hui(p) >= huigrain(p)) then cphase(p) = 3._r8 - bglfr(p) = 1._r8/(leaf_long(ivt(p))*dayspyr*secspday) + bglfr(p) = 1._r8/(leaf_long(ivt(p))*avg_dayspyr*secspday) end if ! continue fertilizer application while in phase 2; diff --git a/src/soilbiogeochem/SoilBiogeochemDecompCascadeBGCMod.F90 b/src/soilbiogeochem/SoilBiogeochemDecompCascadeBGCMod.F90 index 6749b8ebc4..5e21c96d2e 100644 --- a/src/soilbiogeochem/SoilBiogeochemDecompCascadeBGCMod.F90 +++ b/src/soilbiogeochem/SoilBiogeochemDecompCascadeBGCMod.F90 @@ -582,7 +582,7 @@ subroutine decomp_rate_constants_bgc(bounds, num_soilc, filter_soilc, & ! written by C. Koven based on original CLM4 decomposition cascade ! ! !USES: - use clm_time_manager , only : get_curr_days_per_year + use clm_time_manager , only : get_average_days_per_year use shr_const_mod , only : SHR_CONST_PI use clm_varcon , only : secspday ! @@ -654,7 +654,7 @@ subroutine decomp_rate_constants_bgc(bounds, num_soilc, filter_soilc, & errMsg(sourcefile, __LINE__)) endif - days_per_year = get_curr_days_per_year() + days_per_year = get_average_days_per_year() ! 
set "Q10" parameter Q10 = CNParamsShareInst%Q10 From c150d1ec95a11f9ddc8bbc315f9163bc618964ce Mon Sep 17 00:00:00 2001 From: Ryan Knox Date: Wed, 2 Feb 2022 08:50:26 -0700 Subject: [PATCH 159/223] Setting temporary fates external for testing FATES picard hydraulics solver --- Externals_CLM.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Externals_CLM.cfg b/Externals_CLM.cfg index a5fc21e0bb..fffa6fba70 100644 --- a/Externals_CLM.cfg +++ b/Externals_CLM.cfg @@ -1,8 +1,8 @@ [fates] local_path = src/fates protocol = git -repo_url = https://github.com/NGEET/fates -tag = sci.1.52.0_api.20.0.0 +repo_url = https://github.com/rgknox/fates +branch = sci.1.53.0_api.21.0.0-merge-picard required = True [externals_description] From 3ec3828890a3c239e7247f2eabe8565d5d592851 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Wed, 2 Feb 2022 12:00:23 -0700 Subject: [PATCH 160/223] Fix new MIMICS call to get days per year --- src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90 b/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90 index 677aa4b04d..6e7e6c4566 100644 --- a/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90 +++ b/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90 @@ -760,7 +760,7 @@ subroutine decomp_rates_mimics(bounds, num_soilc, filter_soilc, & ! decomposition cascade model ! ! !USES: - use clm_time_manager , only : get_curr_days_per_year + use clm_time_manager , only : get_average_days_per_year use clm_varcon , only : secspday, secsphr, tfrz use clm_varcon , only : g_to_mg, cm3_to_m3 ! @@ -873,7 +873,7 @@ subroutine decomp_rates_mimics(bounds, num_soilc, filter_soilc, & mino2lim = CNParamsShareInst%mino2lim - days_per_year = get_curr_days_per_year() + days_per_year = get_average_days_per_year() ! ! Set "decomp_depth_efolding" parameter ! decomp_depth_efolding = CNParamsShareInst%decomp_depth_efolding From 76f908f8c4ffff667e436d2a63dc2f0ac8bb6d9d Mon Sep 17 00:00:00 2001 From: Sean Swenson Date: Tue, 8 Feb 2022 13:08:17 -0700 Subject: [PATCH 161/223] remove irrigation flux from HydrologyDrainageMod --- src/biogeophys/HydrologyDrainageMod.F90 | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/biogeophys/HydrologyDrainageMod.F90 b/src/biogeophys/HydrologyDrainageMod.F90 index 4f9c549111..31ffc817a0 100644 --- a/src/biogeophys/HydrologyDrainageMod.F90 +++ b/src/biogeophys/HydrologyDrainageMod.F90 @@ -115,8 +115,7 @@ subroutine HydrologyDrainage(bounds, & qflx_runoff => waterfluxbulk_inst%qflx_runoff_col , & ! total runoff (qflx_drain+qflx_surf+qflx_qrgwl) (mm H2O /s) qflx_runoff_u => waterfluxbulk_inst%qflx_runoff_u_col , & ! Urban total runoff (qflx_drain+qflx_surf) (mm H2O /s) qflx_runoff_r => waterfluxbulk_inst%qflx_runoff_r_col , & ! Rural total runoff (qflx_drain+qflx_surf+qflx_qrgwl) (mm H2O /s) - qflx_ice_runoff_snwcp => waterfluxbulk_inst%qflx_ice_runoff_snwcp_col, & ! solid runoff from snow capping (mm H2O /s) - qflx_sfc_irrig => waterfluxbulk_inst%qflx_sfc_irrig_col & ! surface irrigation flux (mm H2O /s) + qflx_ice_runoff_snwcp => waterfluxbulk_inst%qflx_ice_runoff_snwcp_col & ! solid runoff from snow capping (mm H2O /s) ) ! Determine time step and step size @@ -217,9 +216,6 @@ subroutine HydrologyDrainage(bounds, & qflx_runoff(c) = qflx_drain(c) + qflx_surf(c) + qflx_qrgwl(c) + qflx_drain_perched(c) - if ((lun%itype(l)==istsoil .or. 
lun%itype(l)==istcrop) .and. col%active(c)) then - qflx_runoff(c) = qflx_runoff(c) - qflx_sfc_irrig(c) - end if if (lun%urbpoi(l)) then qflx_runoff_u(c) = qflx_runoff(c) else if (lun%itype(l)==istsoil .or. lun%itype(l)==istcrop) then From e6dd762355c41347f296d2b0f98d86c3f689d0d3 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 8 Feb 2022 21:10:12 -0700 Subject: [PATCH 162/223] Update cpl7 to version with fire-emission update that we need to coordinate in it --- Externals.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Externals.cfg b/Externals.cfg index 5e82db0558..21b6eb6bbb 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -63,7 +63,7 @@ externals = Externals_CDEPS.cfg required = True [cpl7] -tag = cpl7.0.11 +tag = cpl7.0.12 protocol = git repo_url = https://github.com/ESCOMP/CESM_CPL7andDataComps local_path = components/cpl7 From d66e83cf72d70211a5b3f4b1e73e9c18c7656460 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 8 Feb 2022 21:16:45 -0700 Subject: [PATCH 163/223] Add ccs_config directory to gitignore and remove the PTCLM clone directory --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 1c9f52fe14..9faff58438 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,8 @@ # directories checked out by manage_externals, and other files created # by manage_externals manage_externals.log +ccs_config /src/fates/ -/tools/site_and_regional/PTCLM/ /cime/ /components/ /libraries/ From 5804aa6f504b416d314305ead008f80dc551ff8c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 9 Feb 2022 00:04:55 -0700 Subject: [PATCH 164/223] adding constants for months. --- .../site_and_regional/single_point_case.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 829902e744..c9a0981339 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -21,6 +21,10 @@ NAT_PFT = 15 MAX_PFT = 78 +# -- constants to represent months of year +FIRST_MONTH = 1 +LAST_MONTH = 12 + class SinglePointCase(BaseCase): """ A class to encapsulate everything for single point cases. @@ -144,21 +148,21 @@ def check_dom_pft (self): same range. e.g. If users specified multiple dom_pft, they should be either in : - - 1-14 range + - 0 - NAT_PFT-1 range (i.e. 0-14) or - - 15-78 range + - NAT_PFT - MAX_PFT range (i.e. 15-78) - give an error : mixed land units not possible. dom_pft in netcdf: 1-15 which tranlate to 0-14 ------------- Raises: Error (ArgumentTypeError): - If any dom_pft is bigger than 78. + If any dom_pft is bigger than (MAX_PFT)78. Error (ArgumentTypeError): If any dom_pft is less than 1. Error (ArgumentTypeError): If mixed land units are chosen. - dom_pft values are both in range of 1-14 and 15-78. + dom_pft values are both in range of 0- NAT_PFT-1 (i.e. 0-14) and 15-78 (i.e. NAT_PFT - MAX_PFT). """ @@ -170,8 +174,8 @@ def check_dom_pft (self): min_dom_pft = min(self.dom_pft) max_dom_pft = max(self.dom_pft) - #-- check dom_pft values should be between 1-78 - if min_dom_pft <1 or max_dom_pft >MAX_PFT: + #-- check dom_pft values should be between 0-MAX_PFT + if min_dom_pft <0 or max_dom_pft >MAX_PFT: err_msg = "values for --dompft should be between 1 and 78." 
raise argparse.ArgumentTypeError(err_msg)

@@ -582,7 +586,7 @@ def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_s
 tpqwfiles = []
 for year in range(datm_syr, datm_eyr + 1):
 ystr = str(year)
- for month in range(1, 13):
+ for month in range(FIRST_MONTH, LAST_MONTH + 1):
 mstr = str(month)
 if month < 10:
 mstr = "0" + mstr

From 1e7c8aca5cf85d682eb86eb46826394adbe9d56a Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Wed, 9 Feb 2022 00:25:11 -0700
Subject: [PATCH 165/223] adding test cases for the edge cases

---
 python/ctsm/test/test_unit_args_utils.py | 38 ++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/python/ctsm/test/test_unit_args_utils.py b/python/ctsm/test/test_unit_args_utils.py
index e8a29e189f..17e1a5b3a1 100755
--- a/python/ctsm/test/test_unit_args_utils.py
+++ b/python/ctsm/test/test_unit_args_utils.py
@@ -63,6 +63,30 @@ def test_plonType_outOfBounds_negative(self):
 ):
 _ = plon_type(-200)

+ # -- = -180
+ def test_plonType_negative_180(self):
+ """
+ Test for when plon values are -180
+ """
+ result = plon_type(-180)
+ self.assertEqual(result, 180.0)
+
+ # -- = 0
+ def test_plonType_zero(self):
+ """
+ Test for when plon values are 0
+ """
+ result = plon_type(0)
+ self.assertEqual(result, 0)
+
+ # -- = 360
+ def test_plonType_positive_360(self):
+ """
+ Test for when plon values are 360.
+ """
+ result = plon_type(360)
+ self.assertEqual(result, 360.0)
+
 class TestArgsPlat(unittest.TestCase):
 """
 Tests for plat_type in args_util.py
@@ -76,6 +100,20 @@ def test_platType_outOfBounds_positive(self):
 ):
 _ = plat_type(91)

+ def test_platType_90(self):
+ """
+ Test for when the plat value is exactly 90
+ """
+ result = plat_type(90)
+ self.assertEqual(result, 90.0)
+
+ def test_platType_negative_90(self):
+ """
+ Test for when the plat value is exactly -90
+ """
+ result = plat_type(-90)
+ self.assertEqual(result, -90.0)
+
 def test_platType_outOfBounds_negative(self):
 """
 Test of plat_type smaller than -90

From 36769b08b5d53b8d1086a4e9b868d43453a29f4a Mon Sep 17 00:00:00 2001
From: Erik Kluzek
Date: Wed, 9 Feb 2022 01:36:42 -0700
Subject: [PATCH 166/223] Update ndep file for SSP3-7.0 (fixing #1578) and fix
 ndep file for SSP2-4.5 for clm4_5

---
 bld/namelist_files/namelist_defaults_ctsm.xml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml
index 1ee46abecb..d6f34b318c 100644
--- a/bld/namelist_files/namelist_defaults_ctsm.xml
+++ b/bld/namelist_files/namelist_defaults_ctsm.xml
@@ -1492,7 +1492,7 @@ lnd/clm2/surfdata_map/release-clm5.0.30/surfdata_ne0np4.CONUS.ne30x8_hist_78pfts
 lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP2-4.5-WACCM_1849-2101_monthly_c191007.nc
 lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP3-7.0-WACCM_1849-2101_monthly_c191007.nc
+>lnd/clm2/ndepdata/fndep_clm_SSP370_b.e21.BWSSP370cmip6.f09_g17.CMIP6-SSP3-7.0-WACCM.002_1849-2101_monthly_0.9x1.25_c211216.nc
 lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP5-8.5-WACCM_1849-2101_monthly_c191007.nc
@@ -1501,16 +1501,16 @@ lnd/clm2/surfdata_map/release-clm5.0.30/surfdata_ne0np4.CONUS.ne30x8_hist_78pfts
 lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP2-4.5-WACCM_1849-2101_monthly_c191007.nc
 lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP3-7.0-WACCM_1849-2101_monthly_c191007.nc
+>lnd/clm2/ndepdata/fndep_clm_SSP370_b.e21.BWSSP370cmip6.f09_g17.CMIP6-SSP3-7.0-WACCM.002_1849-2101_monthly_0.9x1.25_c211216.nc
 lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP5-8.5-WACCM_1849-2101_monthly_c191007.nc
lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP1-2.6-WACCM_1849-2101_monthly_c191007.nc lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP5-8.5-WACCM_1849-2101_monthly_c191007.nc +>lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP2-4.5-WACCM_1849-2101_monthly_c191007.nc lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP3-7.0-WACCM_1849-2101_monthly_c191007.nc +>lnd/clm2/ndepdata/fndep_clm_SSP370_b.e21.BWSSP370cmip6.f09_g17.CMIP6-SSP3-7.0-WACCM.002_1849-2101_monthly_0.9x1.25_c211216.nc cycle NDEP_month From d367be4ea729f2c9fb8b81d49c011802e0a65fe2 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 9 Feb 2022 01:47:47 -0700 Subject: [PATCH 167/223] Make behavior of irrigate consistent for crop on or off, and only turn on for SSP scenarios for clm5_0 or clm5_1 fixing #509 --- bld/namelist_files/namelist_defaults_ctsm.xml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index d8bf3cf377..079ce80b56 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -123,14 +123,13 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/isotopes/atm_delta_C14_CMIP6_SSP5B_3x1_global_1850-2100_yearly_c181209.nc -.true. -.false. -.true. -.false. -.false. - -.false. -.true. +.true. +.false. +.true. +.false. +.false. + +.false. From 1859cd9e82c8e150c8290922501a7c9da8a357c4 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 9 Feb 2022 01:53:48 -0700 Subject: [PATCH 168/223] Switch FatesCH4 test for FATES off fixing #1526 --- cime_config/testdefs/testlist_clm.xml | 4 ++-- cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/README | 2 -- .../testdefs/testmods_dirs/clm/FatesColdDefCH4Off/README | 1 + .../{FatesColdDefCH4 => FatesColdDefCH4Off}/include_user_mods | 0 .../clm/{FatesColdDefCH4 => FatesColdDefCH4Off}/user_nl_clm | 2 +- 5 files changed, 4 insertions(+), 5 deletions(-) delete mode 100644 cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/README create mode 100644 cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/README rename cime_config/testdefs/testmods_dirs/clm/{FatesColdDefCH4 => FatesColdDefCH4Off}/include_user_mods (100%) rename cime_config/testdefs/testmods_dirs/clm/{FatesColdDefCH4 => FatesColdDefCH4Off}/user_nl_clm (62%) diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index 34b4c7363d..dd40b43f2a 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -2291,7 +2291,7 @@ - + @@ -2299,7 +2299,7 @@ - + diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/README b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/README deleted file mode 100644 index 299d5cf468..0000000000 --- a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/README +++ /dev/null @@ -1,2 +0,0 @@ -This test mod outputs an optional text file containing a table of the -history fields master list. 
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/README b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/README new file mode 100644 index 0000000000..0af53362c4 --- /dev/null +++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/README @@ -0,0 +1 @@ +Test with FATES with methane off which is normally on by default diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/include_user_mods similarity index 100% rename from cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/include_user_mods rename to cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/include_user_mods diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/user_nl_clm similarity index 62% rename from cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/user_nl_clm rename to cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/user_nl_clm index b01aafeef5..4d7617fed4 100644 --- a/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4/user_nl_clm +++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdDefCH4Off/user_nl_clm @@ -1,2 +1,2 @@ -use_lch4 = .true. +use_lch4 = .false. hist_master_list_file = .true. From f4821044be5781bbe497eb0f90568e44d9d3c32e Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 9 Feb 2022 10:17:13 -0700 Subject: [PATCH 169/223] Had to add logic for fsurdat that if an exact match was not found, to also check for a file with irrig==TRUE, this is needed for #509 --- bld/CLMBuildNamelist.pm | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm index 30809918ee..b074f43b13 100755 --- a/bld/CLMBuildNamelist.pm +++ b/bld/CLMBuildNamelist.pm @@ -2269,7 +2269,14 @@ sub setup_logic_surface_dataset { add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, 'hgrid'=>$nl_flags->{'res'}, 'ssp_rcp'=>$nl_flags->{'ssp_rcp'}, 'use_vichydro'=>$nl_flags->{'use_vichydro'}, 'sim_year'=>$nl_flags->{'sim_year'}, 'irrigate'=>$nl_flags->{'irrigate'}, - 'use_crop'=>$nl_flags->{'use_crop'}, 'glc_nec'=>$nl_flags->{'glc_nec'}); + 'use_crop'=>$nl_flags->{'use_crop'}, 'glc_nec'=>$nl_flags->{'glc_nec'}, 'nofail'=>1 ); + if ( ! defined($nl->get_value($var) ) ) { + $log->verbose_message( "Exact match of $var NOT found, searching for version with irrigate true" ); + } + add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, + 'hgrid'=>$nl_flags->{'res'}, 'ssp_rcp'=>$nl_flags->{'ssp_rcp'}, 'use_vichydro'=>$nl_flags->{'use_vichydro'}, + 'sim_year'=>$nl_flags->{'sim_year'}, 'irrigate'=>".true.", + 'use_crop'=>$nl_flags->{'use_crop'}, 'glc_nec'=>$nl_flags->{'glc_nec'} ); } } From 6346ec595c6546437aeb4fffe6e0b05b3df1e1f0 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 9 Feb 2022 11:39:17 -0700 Subject: [PATCH 170/223] updating single_point docs --- .../ctsm/site_and_regional/single_point_case.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index c9a0981339..87d8bb3d0d 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -148,21 +148,20 @@ def check_dom_pft (self): same range. e.g. 
If users specified multiple dom_pft, they should be either in : - - 0 - NAT_PFT-1 range (i.e. 0-14) + - 0 - NAT_PFT-1 range or - - NAT_PFT - MAX_PFT range (i.e. 15-78) + - NAT_PFT - MAX_PFT range - give an error : mixed land units not possible. - dom_pft in netcdf: 1-15 which tranlate to 0-14 ------------- Raises: Error (ArgumentTypeError): - If any dom_pft is bigger than (MAX_PFT)78. + If any dom_pft is bigger than MAX_PFT. Error (ArgumentTypeError): If any dom_pft is less than 1. Error (ArgumentTypeError): If mixed land units are chosen. - dom_pft values are both in range of 0- NAT_PFT-1 (i.e. 0-14) and 15-78 (i.e. NAT_PFT - MAX_PFT). + dom_pft values are both in range of (0 - NAT_PFT-1) and (NAT_PFT - MAX_PFT). """ @@ -193,9 +192,9 @@ def check_dom_pft (self): Subsetting using mixed land units is not possible. Please make sure all --dompft values are in only one of these ranges: - - 1-14 - - 15-78 - """ + - 0-{} natural pfts + - {}-{} crop pfts (cfts) + """.format(NAT_PFT-1, NAT_PFT, MAX_PFT) raise argparse.ArgumentTypeError(err_msg) def check_nonveg (self): From 124ee5a9024b40b8d9fc88309f027655bb4d6f76 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 9 Feb 2022 13:03:31 -0700 Subject: [PATCH 171/223] adding more unit tests to this code. --- python/ctsm/site_and_regional/single_point_case.py | 2 -- python/ctsm/test/test_unit_args_utils.py | 4 ++-- python/ctsm/test/test_unit_singlept_data.py | 6 +++--- 3 files changed, 5 insertions(+), 7 deletions(-) mode change 100644 => 100755 python/ctsm/test/test_unit_singlept_data.py diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 87d8bb3d0d..dbaa35d149 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -179,8 +179,6 @@ def check_dom_pft (self): raise argparse.ArgumentTypeError(err_msg) #-- check dom_pft vs num_pft - print (max_dom_pft) - print (self.num_pft) if self.num_pft -1 < max_dom_pft < MAX_PFT : err_msg = "Please use --crop flag when --dompft is above 15." 
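            # e.g. with the default 16-pft (non-crop) dataset, --dompft 17 trips
            # this check, since num_pft - 1 = 15 < 17 < MAX_PFT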
raise argparse.ArgumentTypeError(err_msg) diff --git a/python/ctsm/test/test_unit_args_utils.py b/python/ctsm/test/test_unit_args_utils.py index 17e1a5b3a1..73aab9f6a8 100755 --- a/python/ctsm/test/test_unit_args_utils.py +++ b/python/ctsm/test/test_unit_args_utils.py @@ -100,14 +100,14 @@ def test_platType_outOfBounds_positive(self): ): _ = plat_type(91) - def test_platType_outOfBounds_90(self): + def test_platType_outOfBounds_pos90(self): """ Test of plat_type is 90 """ result = plat_type(90) self.assertEqual(result, 90.0) - def test_platType_outOfBounds_90(self): + def test_platType_outOfBounds_neg90(self): """ Test of plat_type is -90 """ diff --git a/python/ctsm/test/test_unit_singlept_data.py b/python/ctsm/test/test_unit_singlept_data.py old mode 100644 new mode 100755 index 46b1e19e37..bfd249cc4d --- a/python/ctsm/test/test_unit_singlept_data.py +++ b/python/ctsm/test/test_unit_singlept_data.py @@ -145,7 +145,7 @@ def test_check_dom_pft_too_small(self): cap_saturation=self.cap_saturation, out_dir=self.out_dir, ) - single_point.dom_pft = [16, 36, 0] + single_point.dom_pft = [16, 36, -1] with self.assertRaisesRegex( argparse.ArgumentTypeError, "values for --dompft should*" ): @@ -154,7 +154,7 @@ def test_check_dom_pft_too_small(self): def test_check_dom_pft_numpft(self): """ Test check_dom_pft - When dom_pft > 15 but no crop (aka num_pft <15) + When dom_pft > 15 but no crop (aka num_pft =<15) """ single_point = SinglePointCase( plat=self.plat, @@ -200,7 +200,7 @@ def test_check_dom_pft_mixed_range(self): cap_saturation=self.cap_saturation, out_dir=self.out_dir, ) - single_point.dom_pft = [1, 5, 16] + single_point.dom_pft = [1, 5, 15] single_point.num_pft = 78 with self.assertRaisesRegex( argparse.ArgumentTypeError, "mixed land units is not possible*" From 324e9e7cf61f415ea965865d3e37d129bdb20838 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 9 Feb 2022 15:56:50 -0700 Subject: [PATCH 172/223] check to see if file exists --- python/ctsm/site_and_regional/base_case.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index b858b27176..b3fc1e27c2 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -6,6 +6,7 @@ # -- Import libraries # -- standard libraries +import os.path import logging from collections import namedtuple @@ -108,7 +109,12 @@ def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): """ logger.debug("Open file: %s", filename) - f_in = xr.open_dataset(filename) + + if os.path.exists: + f_in = xr.open_dataset(filename) + else: + err_msg = "File not found : " + filename + abort(err_msg) # create 1d coordinate variables to enable sel() method lon0 = np.asarray(f_in[lon_varname][0, :]) From dbc48b74c189d190332d45b890f609389d9c610c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Wed, 9 Feb 2022 18:11:38 -0700 Subject: [PATCH 173/223] updating capability to check if files exist before overwriting. 
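The guard added below reduces to a few lines. A minimal stand-alone sketch of the
intended behavior (a plain FileExistsError stands in for ctsm's abort() helper,
and xr_ds is any xarray Dataset):

import os

def write_to_netcdf(xr_ds, nc_fname, overwrite=False):
    # refuse to clobber an existing file unless the caller opted in,
    # e.g. via the --overwrite command-line flag
    if os.path.exists(nc_fname) and not overwrite:
        raise FileExistsError(
            nc_fname + " already exists; remove it or use --overwrite.")
    # mode 'w' creates the file, or truncates it when overwriting
    xr_ds.to_netcdf(path=nc_fname, mode="w", format="NETCDF3_64BIT")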
--- python/ctsm/site_and_regional/base_case.py | 38 +++++++++++++++++-- .../ctsm/site_and_regional/regional_case.py | 12 ++++-- .../site_and_regional/single_point_case.py | 20 +++++----- python/ctsm/subset_data.py | 8 ++++ 4 files changed, 61 insertions(+), 17 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index b3fc1e27c2..90c6afabbe 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -18,6 +18,7 @@ import xarray as xr # -- import local classes for this script +from ctsm.utils import abort from ctsm.git_utils import get_ctsm_git_short_hash USRDAT_DIR = "CLM_USRDAT_DIR" @@ -46,6 +47,9 @@ class BaseCase: flag for creating DATM files create_user_mods flag for creating a user_mods directory + overwrite : bool + flag for overwriting if the file already exists + Methods ------- create_1d_coord(filename, lon_varname , lat_varname,x_dim , y_dim ) @@ -56,7 +60,7 @@ class BaseCase: """ def __init__(self, create_domain, create_surfdata, create_landuse, create_datm, - create_user_mods): + create_user_mods, overwrite): """ Initializes BaseCase with the given arguments. @@ -72,12 +76,15 @@ def __init__(self, create_domain, create_surfdata, create_landuse, create_datm, Flag for creating datm files a region/single point create_user_mods : bool Flag for creating user mods directories and files for running CTSM + overwrite : bool + flag for overwriting if the file already exists """ self.create_domain = create_domain self.create_surfdata = create_surfdata self.create_landuse = create_landuse self.create_datm = create_datm self.create_user_mods = create_user_mods + self.overwrite = overwrite def __str__(self): """ @@ -108,9 +115,10 @@ def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): f_out (xarray Dataset): Xarray Dataset with 1-d coords """ - logger.debug("Open file: %s", filename) - if os.path.exists: + if os.path.exists(filename): + logger.debug("Open file: %s", filename) + f_in = xr.open_dataset(filename) else: err_msg = "File not found : " + filename @@ -184,3 +192,27 @@ def write_to_file(text, file): Writes text to a file, surrounding text with \n characters """ file.write("\n{}\n".format(text)) + + def write_to_netcdf (self, xr_ds, nc_fname): + """ + Writes a netcdf file if + - the file does not exist. + or + - overwrite flag is chosen. + + Args: + xr_ds : Xarray Dataset + The xarray dataset that we are write out to netcdf file. + nc_fname : str + Netcdf file name + Raises: + Error and aborts the code if the file exists and --overwrite is not used. 
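+
+        Example (illustrative only; the dataset and file name are hypothetical):
+            >>> case.write_to_netcdf(xr.Dataset(), "surfdata_subset.nc")
+            >>> case.write_to_netcdf(xr.Dataset(), "surfdata_subset.nc")
+            The second call aborts unless the case was created with overwrite=True.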
+ """ + if not os.path.exists(nc_fname) or self.overwrite: + # mode 'w' overwrites file + xr_ds.to_netcdf(path=nc_fname, mode="w", format="NETCDF3_64BIT") + else: + err_msg = ("File "+nc_fname+ " already exists."+ + "\n Either remove the file or use --overwrite to overwrite the existing files.") + abort (err_msg) + diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 322f9494b9..eeff6f9dd7 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -43,6 +43,9 @@ class RegionalCase(BaseCase): flag for creating DATM files create_user_mods : bool flag for creating user mods files and folders + overwrite : bool + flag for over-writing files if they already exist + Methods ------- @@ -75,12 +78,13 @@ def __init__( create_datm, create_user_mods, out_dir, + overwrite, ): """ Initializes RegionalCase with the given arguments. """ super().__init__(create_domain, create_surfdata, create_landuse, create_datm, - create_user_mods) + create_user_mods, overwrite) self.lat1 = lat1 self.lat2 = lat2 self.lon1 = lon1 @@ -129,7 +133,7 @@ def create_domain_at_reg(self, indir, file): # mode 'w' overwrites file wfile = os.path.join(self.out_dir, fdomain_out) - f_out.to_netcdf(path=wfile, mode="w", format='NETCDF3_64BIT') + self.write_to_netcdf (f_out, wfile) logger.info("Successfully created file (fdomain_out) %s", wfile) f_in.close() f_out.close() @@ -163,7 +167,7 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir): # mode 'w' overwrites file wfile = os.path.join(self.out_dir, fsurf_out) - f_out.to_netcdf(path=wfile, mode="w", format='NETCDF3_64BIT') + self.write_to_netcdf (f_out, wfile) logger.info("created file (fsurf_out) %s", wfile) f_in.close() f_out.close() @@ -205,7 +209,7 @@ def create_landuse_at_reg(self, indir, file, user_mods_dir): # mode 'w' overwrites file wfile = os.path.join(self.out_dir, fluse_out) - f_out.to_netcdf(path=wfile, mode="w", format='NETCDF3_64BIT') + self.write_to_netcdf (f_out, wfile) logger.info("Successfully created file (fluse_out) %s", wfile) f_in.close() f_out.close() diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index dbaa35d149..b9526bd0bb 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -61,6 +61,8 @@ class SinglePointCase(BaseCase): flag for creating datasets using uniform snowpack saturation_excess : bool flag for making dataset using saturation excess + overwrite : bool + flag for over-writing files if they already exist Methods ------- @@ -107,9 +109,10 @@ def __init__( uni_snow, cap_saturation, out_dir, + overwrite, ): super().__init__(create_domain, create_surfdata, create_landuse, create_datm, - create_user_mods) + create_user_mods, overwrite) self.plat = plat self.plon = plon self.site_name = site_name @@ -310,7 +313,7 @@ def create_domain_at_point(self, indir, file): f_out.attrs["Created_from"] = fdomain_in wfile = os.path.join(self.out_dir, fdomain_out) - f_out.to_netcdf(path=wfile, mode="w", format="NETCDF3_64BIT") + self.write_to_netcdf (f_out, wfile) logger.info("Successfully created file (fdomain_out) %s", wfile) f_in.close() f_out.close() @@ -354,8 +357,7 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): f_out.attrs["Created_from"] = fluse_in wfile = os.path.join(self.out_dir, fluse_out) - # mode 'w' overwrites file - f_out.to_netcdf(path=wfile, mode="w", 
format="NETCDF3_64BIT")
+        self.write_to_netcdf (f_out, wfile)
         logger.info("Successfully created file (fluse_out), %s", wfile)
         f_in.close()
         f_out.close()
@@ -469,9 +471,9 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir):
         # update attributes
         self.update_metadata(f_out)
         f_out.attrs["Created_from"] = fsurf_in
-        # mode 'w' overwrites file
+
         wfile = os.path.join(self.out_dir, fsurf_out)
-        f_out.to_netcdf(path=wfile, mode="w", format="NETCDF3_64BIT")
+        self.write_to_netcdf (f_out, wfile)
         logger.info("Successfully created file (fsurf_out) %s", wfile)
         f_in.close()
         f_out.close()
@@ -510,9 +512,8 @@ def create_datmdomain_at_point(self, datm_tuple: DatmFiles):
         self.update_metadata(f_out)
         f_out.attrs["Created_from"] = fdatmdomain_in
-        # mode 'w' overwrites file
         wfile = os.path.join(self.out_dir, fdatmdomain_out)
-        f_out.to_netcdf(path=wfile, mode="w", format = 'NETCDF3_64BIT')
+        self.write_to_netcdf (f_out, wfile)
         logger.info("Successfully created file (fdatmdomain_out) : %s", wfile)
         f_in.close()
         f_out.close()
@@ -537,8 +538,7 @@ def extract_datm_at(self, file_in, file_out):
         self.update_metadata(f_out)
         f_out.attrs["Created_from"] = file_in
-        # mode 'w' overwrites file
-        f_out.to_netcdf(path=file_out, mode="w")
+        self.write_to_netcdf (f_out, file_out)
         logger.info("Successfully created file : %s", file_out)
         f_in.close()
         f_out.close()
diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py
index a89203a8ca..a1b8d21c15 100644
--- a/python/ctsm/subset_data.py
+++ b/python/ctsm/subset_data.py
@@ -308,6 +308,12 @@ def get_parser():
         type=str,
         default="",
     )
+    subparser.add_argument(
+        "--overwrite",
+        help="Flag to overwrite if the files already exist.",
+        action="store_true",
+        dest="overwrite",
+    )
     add_logging_args(subparser)
 
     # -- print help for both subparsers
@@ -440,6 +446,7 @@ def subset_point(args, file_dict: dict):
         uni_snow = args.uni_snow,
         cap_saturation = args.cap_saturation,
         out_dir = args.out_dir,
+        overwrite = args.overwrite,
     )
 
     logger.debug(single_point)
@@ -496,6 +503,7 @@ def subset_region(args, file_dict: dict):
         create_datm = args.create_datm,
         create_user_mods = args.create_user_mods,
         out_dir = args.out_dir,
+        overwrite = args.overwrite,
     )
 
     logger.debug(region)
From 50e20bb182a6eacb19dedb410373a76a915db108 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Wed, 9 Feb 2022 20:49:32 -0700
Subject: [PATCH 174/223] updating to not use 'file' as an argument name

--- python/ctsm/site_and_regional/base_case.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py
index 90c6afabbe..7f12771b67 100644
--- a/python/ctsm/site_and_regional/base_case.py
+++ b/python/ctsm/site_and_regional/base_case.py
@@ -57,6 +57,10 @@ class BaseCase:
     update_metadata(nc)
         Class method for adding some new attributes (such as date, username) and
         remove the old attributes from the netcdf file.
+ write_to_file: + Writes text to a file, surrounding text with \n characters + write_to_netcdf: + write xarray dataset to netcdf """ def __init__(self, create_domain, create_surfdata, create_landuse, create_datm, @@ -187,11 +191,11 @@ def update_metadata(nc_file): del nc_file.attrs[attr] @staticmethod - def write_to_file(text, file): + def write_to_file(text, file_out): """ Writes text to a file, surrounding text with \n characters """ - file.write("\n{}\n".format(text)) + file_out.write("\n{}\n".format(text)) def write_to_netcdf (self, xr_ds, nc_fname): """ From 740347ae27b2342318ae6404606df43c297663bd Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 13:30:07 -0700 Subject: [PATCH 175/223] updating the unit tests --- python/ctsm/test/test_unit_singlept_data.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/python/ctsm/test/test_unit_singlept_data.py b/python/ctsm/test/test_unit_singlept_data.py index bfd249cc4d..76371122bd 100755 --- a/python/ctsm/test/test_unit_singlept_data.py +++ b/python/ctsm/test/test_unit_singlept_data.py @@ -44,6 +44,7 @@ class TestSinglePointCase(unittest.TestCase): uni_snow = True cap_saturation = True out_dir = os.getcwd() + overwrite = False def test_create_tag_noname(self): """ @@ -65,6 +66,7 @@ def test_create_tag_noname(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, + overwrite = self.overwrite, ) single_point.create_tag() @@ -90,6 +92,7 @@ def test_create_tag_name(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, + overwrite = self.overwrite, ) single_point.site_name = "foo" single_point.create_tag() @@ -116,6 +119,7 @@ def test_check_dom_pft_too_big(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, + overwrite = self.overwrite, ) single_point.dom_pft = [16, 36, 79] with self.assertRaisesRegex( @@ -144,6 +148,7 @@ def test_check_dom_pft_too_small(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, + overwrite = self.overwrite, ) single_point.dom_pft = [16, 36, -1] with self.assertRaisesRegex( @@ -172,6 +177,7 @@ def test_check_dom_pft_numpft(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, + overwrite = self.overwrite, ) single_point.dom_pft = [15, 53] single_point.num_pft = 16 @@ -199,6 +205,7 @@ def test_check_dom_pft_mixed_range(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, + overwrite = self.overwrite, ) single_point.dom_pft = [1, 5, 15] single_point.num_pft = 78 @@ -228,6 +235,7 @@ def test_check_nonveg_nodompft(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, + overwrite = self.overwrite, ) single_point.dom_pft = None single_point.include_nonveg = False @@ -258,6 +266,7 @@ def test_check_pct_pft_notsamenumbers(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, + overwrite = self.overwrite, ) single_point.dom_pft = [1, 5] single_point.pct_pft = [0.5] @@ -287,6 +296,7 @@ def test_check_pct_pft_sum_not1(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, + overwrite = self.overwrite, ) single_point.dom_pft = [1, 5] single_point.pct_pft = [0.1, 0.5] @@ -316,6 +326,7 @@ def test_check_pct_pft_fraction_topct(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, + overwrite = self.overwrite, ) single_point.dom_pft = [1, 5, 8] 
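        # pct_pft below holds fractions summing to 1; check_pct_pft should
        # scale them to [50, 40, 10]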
single_point.pct_pft = [0.5, 0.4, 0.1]
From 2bed6f7fb689e7805572e3492d3c42fb14e4ce22 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Thu, 10 Feb 2022 13:51:51 -0700
Subject: [PATCH 176/223] separate modify_surfdata_atpoint following Erik's
 suggestion.

--- .../site_and_regional/single_point_case.py | 90 +++++++++++-------- 1 file changed, 53 insertions(+), 37 deletions(-)

diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py
index b9526bd0bb..e87bd74a5f 100644
--- a/python/ctsm/site_and_regional/single_point_case.py
+++ b/python/ctsm/site_and_regional/single_point_case.py
@@ -76,6 +76,9 @@ class SinglePointCase(BaseCase):
     create_landuse_at_point:
         Create landuse file at a single point.
 
+    modify_surfdata_atpoint:
+        Modify surface dataset based on combination of user choices.
+
     create_surfdata_at_point:
         Create surface dataset at a single point.
 
@@ -368,52 +371,35 @@ def create_landuse_at_point(self, indir, file, user_mods_dir):
                 line = "flanduse_timeseries = '${}'".format(os.path.join(USRDAT_DIR, fluse_out))
                 self.write_to_file(line, nl_clm)
 
-    def create_surfdata_at_point(self, indir, file, user_mods_dir):
-        """
-        Create surface data file at a single point.
-        """
-        # pylint: disable=too-many-statements
-        logger.info("----------------------------------------------------------------------")
-        logger.info(
-            "Creating surface dataset file at %s, %s", self.plon.__str__(), self.plat.__str__())
-
-        # specify file
-        fsurf_in = os.path.join(indir, file)
-        fsurf_out = add_tag_to_filename(fsurf_in, self.tag)
-        logger.info("fsurf_in: %s", fsurf_in)
-        logger.info("fsurf_out: %s", os.path.join(self.out_dir, fsurf_out))
-
-        # create 1d coordinate variables to enable sel() method
-        f_in = self.create_1d_coord(fsurf_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat")
-        # extract gridcell closest to plon/plat
-        f_out = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest")
-        # expand dimensions
-        f_out = f_out.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True)
+    def modify_surfdata_atpoint (self, f_tmp):
+        """
+        Function to modify surface dataset based on the user flags chosen.
+        """
         #-- modify surface data properties
         if self.dom_pft is not None:
             max_dom_pft = max(self.dom_pft)
             #-- First initialize everything:
             if max_dom_pft < NAT_PFT :
-                f_out ["PCT_NAT_PFT"][:,:,:] = 0
+                f_tmp ["PCT_NAT_PFT"][:,:,:] = 0
             else:
-                f_out["PCT_CFT"][:,:,:] = 0
+                f_tmp ["PCT_CFT"][:,:,:] = 0
 
             # Do we need to initialize these here?
# Because we set them in include_nonveg - #f_out["PCT_NATVEG"][:, :] = 0 - #f_out["PCT_CROP"][:, :] = 0 + #f_tmp["PCT_NATVEG"][:, :] = 0 + #f_tmp["PCT_CROP"][:, :] = 0 #-- loop over all dom_pft and pct_pft zip_pfts = zip (self.dom_pft, self.pct_pft) for dom_pft, pct_pft in zip_pfts: if dom_pft < NAT_PFT: - f_out['PCT_NAT_PFT'][:, :, dom_pft] = pct_pft + f_tmp['PCT_NAT_PFT'][:, :, dom_pft] = pct_pft else: dom_pft = dom_pft-NAT_PFT - f_out['PCT_CFT'][:, :, dom_pft] = pct_pft + f_tmp['PCT_CFT'][:, :, dom_pft] = pct_pft # ------------------------------- # By default include_nonveg=False @@ -422,27 +408,56 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): if not self.include_nonveg: logger.info ("Zeroing out non-vegetation land units in the surface data.") - f_out["PCT_LAKE"][:, :] = 0.0 - f_out["PCT_WETLAND"][:, :] = 0.0 - f_out["PCT_URBAN"][:, :] = 0.0 - f_out["PCT_GLACIER"][:, :] = 0.0 + f_tmp["PCT_LAKE"][:, :] = 0.0 + f_tmp["PCT_WETLAND"][:, :] = 0.0 + f_tmp["PCT_URBAN"][:, :] = 0.0 + f_tmp["PCT_GLACIER"][:, :] = 0.0 max_dom_pft = max(self.dom_pft) if max_dom_pft < NAT_PFT : - f_out["PCT_NATVEG"][:, :] = 100 - f_out["PCT_CROP"][:, :] = 0 + f_tmp["PCT_NATVEG"][:, :] = 100 + f_tmp["PCT_CROP"][:, :] = 0 else: - f_out["PCT_NATVEG"][:, :] = 0 - f_out["PCT_CROP"][:, :] = 100 + f_tmp["PCT_NATVEG"][:, :] = 0 + f_tmp["PCT_CROP"][:, :] = 100 else: logger.info ("You chose --include-nonveg --> \ Do not zero non-vegetation land units in the surface data.") if self.uni_snow: - f_out["STD_ELEV"][:, :] = 20.0 + f_tmp["STD_ELEV"][:, :] = 20.0 if self.cap_saturation: - f_out["FMAX"][:, :] = 0.0 + f_tmp["FMAX"][:, :] = 0.0 + + return f_tmp + + + def create_surfdata_at_point(self, indir, file, user_mods_dir): + """ + Create surface data file at a single point. + """ + # pylint: disable=too-many-statements + logger.info("----------------------------------------------------------------------") + logger.info( + "Creating surface dataset file at %s, %s", self.plon.__str__(), self.plat.__str__()) + + # specify file + fsurf_in = os.path.join(indir, file) + fsurf_out = add_tag_to_filename(fsurf_in, self.tag) + logger.info("fsurf_in: %s", fsurf_in) + logger.info("fsurf_out: %s", os.path.join(self.out_dir, fsurf_out)) + + # create 1d coordinate variables to enable sel() method + f_in = self.create_1d_coord(fsurf_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + + # extract gridcell closest to plon/plat + f_tmp = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") + + # expand dimensions + f_tmp = f_tmp.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) + + f_out = self.modify_surfdata_atpoint (f_tmp) # specify dimension order f_out = f_out.transpose( @@ -476,6 +491,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): self.write_to_netcdf (f_out, wfile) logger.info("Successfully created file (fsurf_out) %s", wfile) f_in.close() + f_tmp.close() f_out.close() # write to user_nl_clm if specified From ba98245409b6aaaba07c2c19f359c076f843ead4 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 16:11:55 -0700 Subject: [PATCH 177/223] adding some unit-testing for netcdf data... 
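The new tests build their surface-dataset fixture in memory rather than reading a
real netcdf file. The pattern, in miniature (variable names follow the test file;
the values are random placeholders):

import numpy as np
import xarray as xr

lsmlat, lsmlon = [20.1], [50.5]          # a single gridcell
natpft = np.arange(0, 15, 1, dtype=int)  # 15 natural pft indices

ds_test = xr.Dataset(
    {
        "PCT_NAT_PFT": xr.DataArray(
            data=np.random.rand(1, 1, 15),
            dims=["lsmlat", "lsmlon", "natpft"],
            coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "natpft": natpft},
        ),
    },
    attrs={"Conventions": "test data only"},
)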
--- .../test/test_unit_singlept_data_surfdata.py | 691 ++++++++++++++++++ 1 file changed, 691 insertions(+) create mode 100755 python/ctsm/test/test_unit_singlept_data_surfdata.py diff --git a/python/ctsm/test/test_unit_singlept_data_surfdata.py b/python/ctsm/test/test_unit_singlept_data_surfdata.py new file mode 100755 index 0000000000..575f24745d --- /dev/null +++ b/python/ctsm/test/test_unit_singlept_data_surfdata.py @@ -0,0 +1,691 @@ +#!/usr/bin/env python3 +""" +Unit tests for creating and modifying surface datasets in SinglePointCase + +for the rest of SinglePointCase tests please see : test_unit_singlept_data + +You can run this by: + python -m unittest test_unit_singlept_data_surfdata.py +""" + +import unittest +import argparse +import os +import sys + + +import numpy as np +import xarray as xr + +# -- add python/ctsm to path (needed if we want to run the test stand-alone) +_CTSM_PYTHON = os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir +) +sys.path.insert(1, _CTSM_PYTHON) + +# pylint: disable=wrong-import-position +from ctsm import unit_testing +from ctsm.site_and_regional.single_point_case import SinglePointCase + +# pylint: disable=invalid-name + + +class TestSinglePointCaseSurfaceNoCrop(unittest.TestCase): + """ + Basic class for testing creating and modifying surface dataset for + non-crop cases (aka using 16 pft dataset) in SinglePointCase class in single_point_case.py. + + """ + + plat = 20.1 + plon = 50.5 + site_name = None + create_domain = True + create_surfdata = True + create_landuse = True + create_datm = True + create_user_mods = True + dom_pft = [8] + pct_pft = None + num_pft = 16 + include_nonveg = False + uni_snow = True + cap_saturation = True + out_dir = os.getcwd() + overwrite = False + + # -- dimensions of xarray dataset + lsmlat = [plat] + lsmlon = [plon] + natpft = np.arange(0, 15, 1, dtype=int) + cft = np.arange(15, 17, 1, dtype=int) + numurbl = np.arange(0, 3, 1, dtype=int) + + ds_test = xr.Dataset( + { + "PCT_NATVEG": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={ + "long_name": "total percent natural vegetation landunit", + "units": "unitless", + }, + ), + "PCT_CROP": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={"long_name": "total percent crop landunit", "units": "unitless"}, + ), + "PCT_NAT_PFT": xr.DataArray( + data=np.random.rand(1, 1, 15), + dims=["lsmlat", "lsmlon", "natpft"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "natpft": natpft}, + attrs={ + "long_name": "percent plant functional type on the natural veg landunit", + "units": "unitless", + }, + ), + "PCT_CFT": xr.DataArray( + data=np.random.rand(1, 1, 2), + dims=["lsmlat", "lsmlon", "cft"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "cft": cft}, + attrs={ + "long_name": "percent crop functional type on the crop landunit", + "units": "unitless", + }, + ), + "PCT_LAKE": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={"long_name": "percent lake", "units": "unitless"}, + ), + "PCT_WETLAND": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={"long_name": "percent wetland", "units": "unitless"}, + ), + "PCT_URBAN": xr.DataArray( + data=np.random.rand(1, 1, 3), + dims=["lsmlat", "lsmlon", "numurbl"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon, 
"numurbl": numurbl}, + attrs={ + "long_name": "percent urban for each density type", + "units": "unitless", + }, + ), + "PCT_GLACIER": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={"long_name": "percent glacier", "units": "unitless"}, + ), + "STD_ELEV": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={"long_name": "standard deviation of elevation", "units": "m"}, + ), + "FMAX": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={ + "long_name": "maximum fractional saturated area", + "units": "unitless", + }, + ), + }, + attrs={"Conventions": "test data only"}, + ) + + def test_modify_surfdata_atpoint_nocrop_1pft_pctnatpft(self): + """ + Test modify_surfdata_atpoint + Checks PCT_NAT_PFT for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = False + single_point.dom_pft = [5] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + expected_out = np.zeros((1, 1, 15)) + expected_out[:, :, 5] = 100 + + # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100) + np.testing.assert_array_equal(ds_out["PCT_NAT_PFT"].data, expected_out) + + def test_modify_surfdata_atpoint_nocrop_1pft_pctnatveg(self): + """ + Test modify_surfdata_atpoint + Checks PCT_NATVEG for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = False + single_point.dom_pft = [5] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out['PCT_NATVEG'].data[:,:], 100) + + def test_modify_surfdata_atpoint_nocrop_1pft_pctcrop(self): + """ + Test modify_surfdata_atpoint + Checks PCT_CROP for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = False + single_point.dom_pft = [5] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out['PCT_CROP'].data[:,:], 0) + + def test_modify_surfdata_atpoint_nocrop_1pft_glacier(self): + """ + 
Test modify_surfdata_atpoint + Checks GLACIER for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = False + single_point.dom_pft = [5] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out['PCT_GLACIER'].data[:,:], 0) + + + def test_modify_surfdata_atpoint_nocrop_1pft_unisnow(self): + """ + Test modify_surfdata_atpoint + Checks STD_ELV for one pft and unisnow + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = False + single_point.dom_pft = [5] + single_point.uni_snow = True + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out['STD_ELEV'].data[:,:], 20) + + def test_modify_surfdata_atpoint_nocrop_1pft_capsat(self): + """ + Test modify_surfdata_atpoint + Checks FMAX for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = False + single_point.dom_pft = [5] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + single_point.cap_saturation = True + + self.assertEqual(ds_out['FMAX'].data[:,:], 0) + + def test_modify_surfdata_atpoint_nocrop_multipft(self): + """ + Test modify_surfdata_atpoint + Checks PCT_NAT_PFT for multi pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = False + single_point.dom_pft = [1,3,5] + single_point.pct_pft = [0.5, 0.4, 0.1] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + expected_out = np.zeros((1, 1, 15)) + expected_out[:, :, 1] = 0.5 + expected_out[:, :, 3] = 0.4 + expected_out[:, :, 5] = 0.1 + + # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100) + 
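+        # modify_surfdata_atpoint writes pct_pft through unchanged, so the
+        # expected array carries the raw fractions [0.5, 0.4, 0.1]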
np.testing.assert_array_equal(ds_out["PCT_NAT_PFT"].data, expected_out) + + +class TestSinglePointCaseSurfaceCrop(unittest.TestCase): + """ + Basic class for testing creating and modifying surface dataset for + crop cases (aka using 78 pft dataset) in SinglePointCase class in single_point_case.py. + + """ + + plat = 20.1 + plon = 50.5 + site_name = None + create_domain = True + create_surfdata = True + create_landuse = True + create_datm = True + create_user_mods = True + dom_pft = [17] + pct_pft = None + num_pft = 78 + include_nonveg = False + uni_snow = False + cap_saturation =False + out_dir = os.getcwd() + overwrite = False + + # -- dimensions of xarray dataset + lsmlat = [plat] + lsmlon = [plon] + natpft = np.arange(0, 15, 1, dtype=int) + cft = np.arange(15, 79, 1, dtype=int) + numurbl = np.arange(0, 3, 1, dtype=int) + + ds_test = xr.Dataset( + { + "PCT_NATVEG": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={ + "long_name": "total percent natural vegetation landunit", + "units": "unitless", + }, + ), + "PCT_CROP": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={"long_name": "total percent crop landunit", "units": "unitless"}, + ), + "PCT_NAT_PFT": xr.DataArray( + data=np.random.rand(1, 1, 15), + dims=["lsmlat", "lsmlon", "natpft"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "natpft": natpft}, + attrs={ + "long_name": "percent plant functional type on the natural veg landunit", + "units": "unitless", + }, + ), + "PCT_CFT": xr.DataArray( + data=np.random.rand(1, 1, 64), + dims=["lsmlat", "lsmlon", "cft"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "cft": cft}, + attrs={ + "long_name": "percent crop functional type on the crop landunit", + "units": "unitless", + }, + ), + "PCT_LAKE": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={"long_name": "percent lake", "units": "unitless"}, + ), + "PCT_WETLAND": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={"long_name": "percent wetland", "units": "unitless"}, + ), + "PCT_URBAN": xr.DataArray( + data=np.random.rand(1, 1, 3), + dims=["lsmlat", "lsmlon", "numurbl"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon, "numurbl": numurbl}, + attrs={ + "long_name": "percent urban for each density type", + "units": "unitless", + }, + ), + "PCT_GLACIER": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={"long_name": "percent glacier", "units": "unitless"}, + ), + "STD_ELEV": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={"long_name": "standard deviation of elevation", "units": "m"}, + ), + "FMAX": xr.DataArray( + data=np.random.rand(1, 1), + dims=["lsmlat", "lsmlon"], + coords={"lsmlat": lsmlat, "lsmlon": lsmlon}, + attrs={ + "long_name": "maximum fractional saturated area", + "units": "unitless", + }, + ), + }, + attrs={"Conventions": "test data only"}, + ) + + def test_modify_surfdata_atpoint_crop_1pft_pctnatpft(self): + """ + Test modify_surfdata_atpoint + Checks PCT_NAT_PFT for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + 
create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.dom_pft = [19] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + expected_out = np.zeros((1, 1, 64)) + expected_out[:, :, 4] = 100 + + # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100) + np.testing.assert_array_equal(ds_out["PCT_CFT"].data, expected_out) + + def test_modify_surfdata_atpoint_crop_1pft_pctnatveg(self): + """ + Test modify_surfdata_atpoint + Checks PCT_NATVEG for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.dom_pft = [17] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out['PCT_NATVEG'].data[:,:], 0) + + def test_modify_surfdata_atpoint_crop_1pft_pctcrop(self): + """ + Test modify_surfdata_atpoint + Checks PCT_CROP for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.dom_pft = [17] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out['PCT_CROP'].data[:,:], 100) + + def test_modify_surfdata_atpoint_crop_1pft_glacier(self): + """ + Test modify_surfdata_atpoint + Checks GLACIER for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.dom_pft = [17] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out['PCT_GLACIER'].data[:,:], 0) + + + def test_modify_surfdata_atpoint_nocrop_1pft_unisnow(self): + """ + Test modify_surfdata_atpoint for crop cases + Checks STD_ELV for one pft and unisnow + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + 
num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.dom_pft = [17] + single_point.uni_snow = True + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out['STD_ELEV'].data[:,:], 20) + + def test_modify_surfdata_atpoint_crop_1pft_capsat(self): + """ + Test modify_surfdata_atpoint for crop cases + Checks FMAX for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.cap_saturation = True + single_point.dom_pft = [22] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + single_point.cap_saturation = True + + self.assertEqual(ds_out['FMAX'].data[:,:], 0) + + def test_modify_surfdata_atpoint_crop_multipft(self): + """ + Test modify_surfdata_atpoint for crop cases + Checks PCT_NAT_PFT for multi pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.dom_pft = [17,22] + single_point.pct_pft = [0.6, 0.4] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + expected_out = np.zeros((1, 1, 64)) + expected_out[:, :, 2] = 0.6 + expected_out[:, :, 7] = 0.4 + + + # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100) + np.testing.assert_array_equal(ds_out["PCT_CFT"].data, expected_out) + + + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() From 5e2a83af0a3d1114d76caeb3df2739d833c95c3c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 16:20:11 -0700 Subject: [PATCH 178/223] pylint clean ups --- python/ctsm/test/test_unit_singlept_data_surfdata.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/ctsm/test/test_unit_singlept_data_surfdata.py b/python/ctsm/test/test_unit_singlept_data_surfdata.py index 575f24745d..25521a2951 100755 --- a/python/ctsm/test/test_unit_singlept_data_surfdata.py +++ b/python/ctsm/test/test_unit_singlept_data_surfdata.py @@ -9,7 +9,6 @@ """ import unittest -import argparse import os import sys From c1d47583f2b59cfcda8ac0c7ac13b62167933d25 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 16:20:30 -0700 Subject: [PATCH 179/223] more clean ups --- .../test/test_unit_singlept_data_surfdata.py | 31 ++++++++----------- 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/python/ctsm/test/test_unit_singlept_data_surfdata.py b/python/ctsm/test/test_unit_singlept_data_surfdata.py index 25521a2951..1e3d7e9f76 100755 --- a/python/ctsm/test/test_unit_singlept_data_surfdata.py +++ b/python/ctsm/test/test_unit_singlept_data_surfdata.py @@ -201,7 +201,7 
@@ def test_modify_surfdata_atpoint_nocrop_1pft_pctnatveg(self): single_point.dom_pft = [5] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) - self.assertEqual(ds_out['PCT_NATVEG'].data[:,:], 100) + self.assertEqual(ds_out["PCT_NATVEG"].data[:, :], 100) def test_modify_surfdata_atpoint_nocrop_1pft_pctcrop(self): """ @@ -230,7 +230,7 @@ def test_modify_surfdata_atpoint_nocrop_1pft_pctcrop(self): single_point.dom_pft = [5] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) - self.assertEqual(ds_out['PCT_CROP'].data[:,:], 0) + self.assertEqual(ds_out["PCT_CROP"].data[:, :], 0) def test_modify_surfdata_atpoint_nocrop_1pft_glacier(self): """ @@ -259,8 +259,7 @@ def test_modify_surfdata_atpoint_nocrop_1pft_glacier(self): single_point.dom_pft = [5] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) - self.assertEqual(ds_out['PCT_GLACIER'].data[:,:], 0) - + self.assertEqual(ds_out["PCT_GLACIER"].data[:, :], 0) def test_modify_surfdata_atpoint_nocrop_1pft_unisnow(self): """ @@ -290,7 +289,7 @@ def test_modify_surfdata_atpoint_nocrop_1pft_unisnow(self): single_point.uni_snow = True ds_out = single_point.modify_surfdata_atpoint(self.ds_test) - self.assertEqual(ds_out['STD_ELEV'].data[:,:], 20) + self.assertEqual(ds_out["STD_ELEV"].data[:, :], 20) def test_modify_surfdata_atpoint_nocrop_1pft_capsat(self): """ @@ -320,7 +319,7 @@ def test_modify_surfdata_atpoint_nocrop_1pft_capsat(self): ds_out = single_point.modify_surfdata_atpoint(self.ds_test) single_point.cap_saturation = True - self.assertEqual(ds_out['FMAX'].data[:,:], 0) + self.assertEqual(ds_out["FMAX"].data[:, :], 0) def test_modify_surfdata_atpoint_nocrop_multipft(self): """ @@ -346,7 +345,7 @@ def test_modify_surfdata_atpoint_nocrop_multipft(self): overwrite=self.overwrite, ) single_point.include_nonveg = False - single_point.dom_pft = [1,3,5] + single_point.dom_pft = [1, 3, 5] single_point.pct_pft = [0.5, 0.4, 0.1] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) @@ -379,7 +378,7 @@ class TestSinglePointCaseSurfaceCrop(unittest.TestCase): num_pft = 78 include_nonveg = False uni_snow = False - cap_saturation =False + cap_saturation = False out_dir = os.getcwd() overwrite = False @@ -529,7 +528,7 @@ def test_modify_surfdata_atpoint_crop_1pft_pctnatveg(self): single_point.dom_pft = [17] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) - self.assertEqual(ds_out['PCT_NATVEG'].data[:,:], 0) + self.assertEqual(ds_out["PCT_NATVEG"].data[:, :], 0) def test_modify_surfdata_atpoint_crop_1pft_pctcrop(self): """ @@ -557,7 +556,7 @@ def test_modify_surfdata_atpoint_crop_1pft_pctcrop(self): single_point.dom_pft = [17] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) - self.assertEqual(ds_out['PCT_CROP'].data[:,:], 100) + self.assertEqual(ds_out["PCT_CROP"].data[:, :], 100) def test_modify_surfdata_atpoint_crop_1pft_glacier(self): """ @@ -585,8 +584,7 @@ def test_modify_surfdata_atpoint_crop_1pft_glacier(self): single_point.dom_pft = [17] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) - self.assertEqual(ds_out['PCT_GLACIER'].data[:,:], 0) - + self.assertEqual(ds_out["PCT_GLACIER"].data[:, :], 0) def test_modify_surfdata_atpoint_nocrop_1pft_unisnow(self): """ @@ -615,7 +613,7 @@ def test_modify_surfdata_atpoint_nocrop_1pft_unisnow(self): single_point.uni_snow = True ds_out = single_point.modify_surfdata_atpoint(self.ds_test) - self.assertEqual(ds_out['STD_ELEV'].data[:,:], 20) + self.assertEqual(ds_out["STD_ELEV"].data[:, :], 20) def 
test_modify_surfdata_atpoint_crop_1pft_capsat(self): """ @@ -645,7 +643,7 @@ def test_modify_surfdata_atpoint_crop_1pft_capsat(self): ds_out = single_point.modify_surfdata_atpoint(self.ds_test) single_point.cap_saturation = True - self.assertEqual(ds_out['FMAX'].data[:,:], 0) + self.assertEqual(ds_out["FMAX"].data[:, :], 0) def test_modify_surfdata_atpoint_crop_multipft(self): """ @@ -670,7 +668,7 @@ def test_modify_surfdata_atpoint_crop_multipft(self): out_dir=self.out_dir, overwrite=self.overwrite, ) - single_point.dom_pft = [17,22] + single_point.dom_pft = [17, 22] single_point.pct_pft = [0.6, 0.4] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) @@ -678,13 +676,10 @@ def test_modify_surfdata_atpoint_crop_multipft(self): expected_out[:, :, 2] = 0.6 expected_out[:, :, 7] = 0.4 - # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100) np.testing.assert_array_equal(ds_out["PCT_CFT"].data, expected_out) - - if __name__ == "__main__": unit_testing.setup_for_tests() unittest.main() From c498a664d3821d763aeccb2b6829144455d3ed09 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 16:21:18 -0700 Subject: [PATCH 180/223] more reformatting. --- .../site_and_regional/single_point_case.py | 273 +++++++++++------- 1 file changed, 165 insertions(+), 108 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index e87bd74a5f..9baed758fc 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -25,6 +25,7 @@ FIRST_MONTH = 1 LAST_MONTH = 12 + class SinglePointCase(BaseCase): """ A class to encapsulate everything for single point cases. @@ -96,26 +97,32 @@ class SinglePointCase(BaseCase): # the ones we have are useful def __init__( - self, - plat, - plon, - site_name, + self, + plat, + plon, + site_name, + create_domain, + create_surfdata, + create_landuse, + create_datm, + create_user_mods, + dom_pft, + pct_pft, + num_pft, + include_nonveg, + uni_snow, + cap_saturation, + out_dir, + overwrite, + ): + super().__init__( create_domain, create_surfdata, create_landuse, create_datm, create_user_mods, - dom_pft, - pct_pft, - num_pft, - include_nonveg, - uni_snow, - cap_saturation, - out_dir, overwrite, - ): - super().__init__(create_domain, create_surfdata, create_landuse, create_datm, - create_user_mods, overwrite) + ) self.plat = plat self.plon = plon self.site_name = site_name @@ -128,9 +135,9 @@ def __init__( self.out_dir = out_dir self.create_tag() - self.check_dom_pft () - self.check_nonveg () - self.check_pct_pft () + self.check_dom_pft() + self.check_nonveg() + self.check_pct_pft() def create_tag(self): """ @@ -142,7 +149,7 @@ def create_tag(self): else: self.tag = "{}_{}".format(str(self.plon), str(self.plat)) - def check_dom_pft (self): + def check_dom_pft(self): """ A function to sanity check values in dom_pft: @@ -173,23 +180,25 @@ def check_dom_pft (self): """ if self.dom_pft is None: - logger.warning ("No dominant pft type is chosen. " - "If you want to choose a dominant pft type, please use --dompft flag.") + logger.warning( + "No dominant pft type is chosen. " + "If you want to choose a dominant pft type, please use --dompft flag." 
+ ) else: min_dom_pft = min(self.dom_pft) max_dom_pft = max(self.dom_pft) - #-- check dom_pft values should be between 0-MAX_PFT - if min_dom_pft <0 or max_dom_pft >MAX_PFT: + # -- check dom_pft values should be between 0-MAX_PFT + if min_dom_pft < 0 or max_dom_pft > MAX_PFT: err_msg = "values for --dompft should be between 1 and 78." raise argparse.ArgumentTypeError(err_msg) - #-- check dom_pft vs num_pft - if self.num_pft -1 < max_dom_pft < MAX_PFT : + # -- check dom_pft vs num_pft + if self.num_pft - 1 < max_dom_pft < MAX_PFT: err_msg = "Please use --crop flag when --dompft is above 15." raise argparse.ArgumentTypeError(err_msg) - #-- check if all dom_pft are in the same range: + # -- check if all dom_pft are in the same range: if min_dom_pft < NAT_PFT <= max_dom_pft: err_msg = """ \n @@ -198,10 +207,12 @@ def check_dom_pft (self): one of these ranges: - 0-{} natural pfts - {}-{} crop pfts (cfts) - """.format(NAT_PFT-1, NAT_PFT, MAX_PFT) + """.format( + NAT_PFT - 1, NAT_PFT, MAX_PFT + ) raise argparse.ArgumentTypeError(err_msg) - def check_nonveg (self): + def check_nonveg(self): """ A function to check at least one of the following arguments is given: --include-nonveg @@ -235,8 +246,7 @@ def check_nonveg (self): """ raise argparse.ArgumentTypeError(err_msg) - - def check_pct_pft (self): + def check_pct_pft(self): """ A function to error check pct_pft and calculate it if necessary. @@ -264,7 +274,7 @@ def check_pct_pft (self): raise argparse.ArgumentTypeError(err_msg) # -- check if the sum of pct_pft is equal to 1 or 100 - if sum(self.pct_pft)!= 1 and sum(self.pct_pft) != 100: + if sum(self.pct_pft) != 1 and sum(self.pct_pft) != 100: err_msg = "Sum of --pctpft values should be equal to 1 or 100." raise argparse.ArgumentTypeError(err_msg) @@ -274,7 +284,7 @@ def check_pct_pft (self): # -- if the user did not give --pctpft at all (assume equal percentage) elif self.dom_pft: - pct = 100/len(self.dom_pft) + pct = 100 / len(self.dom_pft) self.pct_pft = [pct for pft in self.dom_pft] # -- if the user only gave --pctpft with no --dompft @@ -284,17 +294,21 @@ def check_pct_pft (self): --pctpft is specfied without --dompft. Please specify your dominant pft by --dompft. """ - raise argparse.ArgumentTypeError (err_msg) + raise argparse.ArgumentTypeError(err_msg) - logger.info (" - dominant pft(s) : %s",self.dom_pft) - logger.info (" - percentage of dominant pft(s) : %s",self.pct_pft) + logger.info(" - dominant pft(s) : %s", self.dom_pft) + logger.info(" - percentage of dominant pft(s) : %s", self.pct_pft) def create_domain_at_point(self, indir, file): """ Create domain file for this SinglePointCase class. """ - logger.info("----------------------------------------------------------------------") - logger.info("Creating domain file at %s, %s.", self.plon.__str__(), self.plat.__str__()) + logger.info( + "----------------------------------------------------------------------" + ) + logger.info( + "Creating domain file at %s, %s.", self.plon.__str__(), self.plat.__str__() + ) # specify files fdomain_in = os.path.join(indir, file) @@ -316,7 +330,7 @@ def create_domain_at_point(self, indir, file): f_out.attrs["Created_from"] = fdomain_in wfile = os.path.join(self.out_dir, fdomain_out) - self.write_to_netcdf (f_out, wfile) + self.write_to_netcdf(f_out, wfile) logger.info("Successfully created file (fdomain_out) %s", wfile) f_in.close() f_out.close() @@ -325,8 +339,14 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): """ Create landuse file at a single point. 
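        The gridcell nearest (plon, plat) is extracted from the input file via
        sel(lsmlon=plon, lsmlat=plat, method="nearest").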
""" - logger.info("----------------------------------------------------------------------") - logger.info("Creating land use file at %s, %s.", self.plon.__str__(), self.plat.__str__()) + logger.info( + "----------------------------------------------------------------------" + ) + logger.info( + "Creating land use file at %s, %s.", + self.plon.__str__(), + self.plat.__str__(), + ) # specify files fluse_in = os.path.join(indir, file) @@ -335,9 +355,7 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): logger.info("fluse_out: %s", os.path.join(self.out_dir, fluse_out)) # create 1d coordinate variables to enable sel() method - f_in = self.create_1d_coord( - fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat" - ) + f_in = self.create_1d_coord(fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") # extract gridcell closest to plon/plat f_out = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") @@ -346,11 +364,13 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): f_out = f_out.expand_dims(["lsmlat", "lsmlon"]) # specify dimension order - f_out = f_out.transpose(u"time", u"cft", u"natpft", u"lsmlat", u"lsmlon") + f_out = f_out.transpose("time", "cft", "natpft", "lsmlat", "lsmlon") # revert expand dimensions of YEAR year = np.squeeze(np.asarray(f_out["YEAR"])) - temp_xr = xr.DataArray(year, coords={"time": f_out["time"]}, dims="time", name="YEAR") + temp_xr = xr.DataArray( + year, coords={"time": f_out["time"]}, dims="time", name="YEAR" + ) temp_xr.attrs["units"] = "unitless" temp_xr.attrs["long_name"] = "Year of PFT data" f_out["YEAR"] = temp_xr @@ -360,7 +380,7 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): f_out.attrs["Created_from"] = fluse_in wfile = os.path.join(self.out_dir, fluse_out) - self.write_to_netcdf (f_out, wfile) + self.write_to_netcdf(f_out, wfile) logger.info("Successfully created file (fluse_out), %s", wfile) f_in.close() f_out.close() @@ -368,38 +388,38 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): # write to user_nl_clm data if specified if self.create_user_mods: with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm: - line = "flanduse_timeseries = '${}'".format(os.path.join(USRDAT_DIR, fluse_out)) + line = "flanduse_timeseries = '${}'".format( + os.path.join(USRDAT_DIR, fluse_out) + ) self.write_to_file(line, nl_clm) - - - def modify_surfdata_atpoint (self, f_tmp): + def modify_surfdata_atpoint(self, f_tmp): """ Function to modify surface dataset based on the user flags chosen. """ - #-- modify surface data properties + # -- modify surface data properties if self.dom_pft is not None: max_dom_pft = max(self.dom_pft) - #-- First initialize everything: - if max_dom_pft < NAT_PFT : - f_tmp ["PCT_NAT_PFT"][:,:,:] = 0 + # -- First initialize everything: + if max_dom_pft < NAT_PFT: + f_tmp["PCT_NAT_PFT"][:, :, :] = 0 else: - f_tmp ["PCT_CFT"][:,:,:] = 0 + f_tmp["PCT_CFT"][:, :, :] = 0 # Do we need to initialize these here? 
# Because we set them in include_nonveg - #f_tmp["PCT_NATVEG"][:, :] = 0 - #f_tmp["PCT_CROP"][:, :] = 0 + # f_tmp["PCT_NATVEG"][:, :] = 0 + # f_tmp["PCT_CROP"][:, :] = 0 - #-- loop over all dom_pft and pct_pft - zip_pfts = zip (self.dom_pft, self.pct_pft) + # -- loop over all dom_pft and pct_pft + zip_pfts = zip(self.dom_pft, self.pct_pft) for dom_pft, pct_pft in zip_pfts: if dom_pft < NAT_PFT: - f_tmp['PCT_NAT_PFT'][:, :, dom_pft] = pct_pft + f_tmp["PCT_NAT_PFT"][:, :, dom_pft] = pct_pft else: - dom_pft = dom_pft-NAT_PFT - f_tmp['PCT_CFT'][:, :, dom_pft] = pct_pft + dom_pft = dom_pft - NAT_PFT + f_tmp["PCT_CFT"][:, :, dom_pft] = pct_pft # ------------------------------- # By default include_nonveg=False @@ -407,14 +427,14 @@ def modify_surfdata_atpoint (self, f_tmp): # Therefore by default we are hitting the following if: if not self.include_nonveg: - logger.info ("Zeroing out non-vegetation land units in the surface data.") + logger.info("Zeroing out non-vegetation land units in the surface data.") f_tmp["PCT_LAKE"][:, :] = 0.0 f_tmp["PCT_WETLAND"][:, :] = 0.0 f_tmp["PCT_URBAN"][:, :] = 0.0 f_tmp["PCT_GLACIER"][:, :] = 0.0 max_dom_pft = max(self.dom_pft) - if max_dom_pft < NAT_PFT : + if max_dom_pft < NAT_PFT: f_tmp["PCT_NATVEG"][:, :] = 100 f_tmp["PCT_CROP"][:, :] = 0 else: @@ -422,8 +442,10 @@ def modify_surfdata_atpoint (self, f_tmp): f_tmp["PCT_CROP"][:, :] = 100 else: - logger.info ("You chose --include-nonveg --> \ - Do not zero non-vegetation land units in the surface data.") + logger.info( + "You chose --include-nonveg --> \ + Do not zero non-vegetation land units in the surface data." + ) if self.uni_snow: f_tmp["STD_ELEV"][:, :] = 20.0 @@ -432,15 +454,19 @@ def modify_surfdata_atpoint (self, f_tmp): return f_tmp - def create_surfdata_at_point(self, indir, file, user_mods_dir): """ Create surface data file at a single point. 
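        The extracted gridcell is then adjusted according to the user flags in
        modify_surfdata_atpoint().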
""" # pylint: disable=too-many-statements - logger.info("----------------------------------------------------------------------") logger.info( - "Creating surface dataset file at %s, %s", self.plon.__str__(), self.plat.__str__()) + "----------------------------------------------------------------------" + ) + logger.info( + "Creating surface dataset file at %s, %s", + self.plon.__str__(), + self.plat.__str__(), + ) # specify file fsurf_in = os.path.join(indir, file) @@ -457,20 +483,20 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): # expand dimensions f_tmp = f_tmp.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) - f_out = self.modify_surfdata_atpoint (f_tmp) + f_out = self.modify_surfdata_atpoint(f_tmp) # specify dimension order f_out = f_out.transpose( - u"time", - u"cft", - u"lsmpft", - u"natpft", - u"nglcec", - u"nglcecp1", - u"nlevsoi", - u"nlevurb", - u"numrad", - u"numurbl", + "time", + "cft", + "lsmpft", + "natpft", + "nglcec", + "nglcecp1", + "nlevsoi", + "nlevurb", + "numrad", + "numurbl", "lsmlat", "lsmlon", ) @@ -478,17 +504,17 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir): # update lsmlat and lsmlon to match site specific instead of the nearest point # we do this so that if we create user_mods the PTS_LON and PTS_LAT in CIME match # the surface data coordinates - which is required - f_out['lsmlon'] = np.atleast_1d(self.plon) - f_out['lsmlat'] = np.atleast_1d(self.plat) - f_out['LATIXY'][:, :] = self.plat - f_out['LONGXY'][:, :] = self.plon + f_out["lsmlon"] = np.atleast_1d(self.plon) + f_out["lsmlat"] = np.atleast_1d(self.plat) + f_out["LATIXY"][:, :] = self.plat + f_out["LONGXY"][:, :] = self.plon # update attributes self.update_metadata(f_out) f_out.attrs["Created_from"] = fsurf_in wfile = os.path.join(self.out_dir, fsurf_out) - self.write_to_netcdf (f_out, wfile) + self.write_to_netcdf(f_out, wfile) logger.info("Successfully created file (fsurf_out) %s", wfile) f_in.close() f_tmp.close() @@ -504,9 +530,14 @@ def create_datmdomain_at_point(self, datm_tuple: DatmFiles): """ Create DATM domain file at a single point """ - logger.info("----------------------------------------------------------------------") logger.info( - "Creating DATM domain file at %s, %s", self.plon.__str__(), self.plat.__str__()) + "----------------------------------------------------------------------" + ) + logger.info( + "Creating DATM domain file at %s, %s", + self.plon.__str__(), + self.plat.__str__(), + ) # specify files fdatmdomain_in = os.path.join(datm_tuple.indir, datm_tuple.fdomain_in) @@ -529,7 +560,7 @@ def create_datmdomain_at_point(self, datm_tuple: DatmFiles): f_out.attrs["Created_from"] = fdatmdomain_in wfile = os.path.join(self.out_dir, fdatmdomain_out) - self.write_to_netcdf (f_out, wfile) + self.write_to_netcdf(f_out, wfile) logger.info("Successfully created file (fdatmdomain_out) : %s", wfile) f_in.close() f_out.close() @@ -548,13 +579,13 @@ def extract_datm_at(self, file_in, file_out): f_out = f_out.expand_dims(["lat", "lon"]) # specify dimension order - f_out = f_out.transpose(u"scalar", "time", "lat", "lon") + f_out = f_out.transpose("scalar", "time", "lat", "lon") # update attributes self.update_metadata(f_out) f_out.attrs["Created_from"] = file_in - self.write_to_netcdf (f_out, file_out) + self.write_to_netcdf(f_out, file_out) logger.info("Successfully created file : %s", file_out) f_in.close() f_out.close() @@ -564,9 +595,13 @@ def write_shell_commands(self, file): writes out xml commands commands to a file (i.e. 
shell_commands) for single-point runs """ # write_to_file surrounds text with newlines - with open(file, 'w') as nl_file: - self.write_to_file("# Change below line if you move the subset data directory", nl_file) - self.write_to_file("./xmlchange {}={}".format(USRDAT_DIR, self.out_dir), nl_file) + with open(file, "w") as nl_file: + self.write_to_file( + "# Change below line if you move the subset data directory", nl_file + ) + self.write_to_file( + "./xmlchange {}={}".format(USRDAT_DIR, self.out_dir), nl_file + ) self.write_to_file("./xmlchange PTS_LON={}".format(str(self.plon)), nl_file) self.write_to_file("./xmlchange PTS_LAT={}".format(str(self.plat)), nl_file) self.write_to_file("./xmlchange MPILIB=mpi-serial", nl_file) @@ -580,16 +615,24 @@ def write_datm_streams_lines(self, streamname, datmfiles, file): datmfiles - comma-separated list (str) of DATM file names file - file connection to user_nl_datm_streams file """ - self.write_to_file("{}:datafiles={}".format(streamname, ','.join(datmfiles)), file) + self.write_to_file( + "{}:datafiles={}".format(streamname, ",".join(datmfiles)), file + ) self.write_to_file("{}:mapalgo=none".format(streamname), file) self.write_to_file("{}:meshfile=none".format(streamname), file) - def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_streams_file): + def create_datm_at_point( + self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_streams_file + ): """ Create all of a DATM dataset at a point. """ - logger.info("----------------------------------------------------------------------") - logger.info("Creating DATM files at %s, %s", self.plon.__str__(), self.plat.__str__()) + logger.info( + "----------------------------------------------------------------------" + ) + logger.info( + "Creating DATM files at %s, %s", self.plon.__str__(), self.plat.__str__() + ) # -- create data files infile = [] @@ -606,27 +649,41 @@ def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_s dtag = ystr + "-" + mstr - fsolar = os.path.join(datm_tuple.indir, datm_tuple.dir_solar, - "{}{}.nc".format(datm_tuple.tag_solar, dtag)) + fsolar = os.path.join( + datm_tuple.indir, + datm_tuple.dir_solar, + "{}{}.nc".format(datm_tuple.tag_solar, dtag), + ) fsolar2 = "{}{}.{}.nc".format(datm_tuple.tag_solar, self.tag, dtag) - fprecip = os.path.join(datm_tuple.indir, datm_tuple.dir_prec, - "{}{}.nc".format(datm_tuple.tag_prec, dtag)) + fprecip = os.path.join( + datm_tuple.indir, + datm_tuple.dir_prec, + "{}{}.nc".format(datm_tuple.tag_prec, dtag), + ) fprecip2 = "{}{}.{}.nc".format(datm_tuple.tag_prec, self.tag, dtag) - ftpqw = os.path.join(datm_tuple.indir, datm_tuple.dir_tpqw, - "{}{}.nc".format(datm_tuple.tag_tpqw, dtag)) + ftpqw = os.path.join( + datm_tuple.indir, + datm_tuple.dir_tpqw, + "{}{}.nc".format(datm_tuple.tag_tpqw, dtag), + ) ftpqw2 = "{}{}.{}.nc".format(datm_tuple.tag_tpqw, self.tag, dtag) outdir = os.path.join(self.out_dir, datm_tuple.outdir) infile += [fsolar, fprecip, ftpqw] - outfile += [os.path.join(outdir, fsolar2), - os.path.join(outdir, fprecip2), - os.path.join(outdir, ftpqw2)] + outfile += [ + os.path.join(outdir, fsolar2), + os.path.join(outdir, fprecip2), + os.path.join(outdir, ftpqw2), + ] solarfiles.append( - os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fsolar2)) + os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fsolar2) + ) precfiles.append( - os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fprecip2)) + os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fprecip2) + ) 
tpqwfiles.append( - os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, ftpqw2)) + os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, ftpqw2) + ) for idx, out_f in enumerate(outfile): logger.debug(out_f) From d94109718b15efdf2ce30202c6d449c43cc1b00d Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 16:21:57 -0700 Subject: [PATCH 181/223] reformatting unit test singlept --- python/ctsm/test/test_unit_singlept_data.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/python/ctsm/test/test_unit_singlept_data.py b/python/ctsm/test/test_unit_singlept_data.py index 76371122bd..b924d49762 100755 --- a/python/ctsm/test/test_unit_singlept_data.py +++ b/python/ctsm/test/test_unit_singlept_data.py @@ -66,7 +66,7 @@ def test_create_tag_noname(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, - overwrite = self.overwrite, + overwrite=self.overwrite, ) single_point.create_tag() @@ -92,7 +92,7 @@ def test_create_tag_name(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, - overwrite = self.overwrite, + overwrite=self.overwrite, ) single_point.site_name = "foo" single_point.create_tag() @@ -119,7 +119,7 @@ def test_check_dom_pft_too_big(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, - overwrite = self.overwrite, + overwrite=self.overwrite, ) single_point.dom_pft = [16, 36, 79] with self.assertRaisesRegex( @@ -148,7 +148,7 @@ def test_check_dom_pft_too_small(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, - overwrite = self.overwrite, + overwrite=self.overwrite, ) single_point.dom_pft = [16, 36, -1] with self.assertRaisesRegex( @@ -177,7 +177,7 @@ def test_check_dom_pft_numpft(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, - overwrite = self.overwrite, + overwrite=self.overwrite, ) single_point.dom_pft = [15, 53] single_point.num_pft = 16 @@ -205,7 +205,7 @@ def test_check_dom_pft_mixed_range(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, - overwrite = self.overwrite, + overwrite=self.overwrite, ) single_point.dom_pft = [1, 5, 15] single_point.num_pft = 78 @@ -235,7 +235,7 @@ def test_check_nonveg_nodompft(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, - overwrite = self.overwrite, + overwrite=self.overwrite, ) single_point.dom_pft = None single_point.include_nonveg = False @@ -266,7 +266,7 @@ def test_check_pct_pft_notsamenumbers(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, - overwrite = self.overwrite, + overwrite=self.overwrite, ) single_point.dom_pft = [1, 5] single_point.pct_pft = [0.5] @@ -296,7 +296,7 @@ def test_check_pct_pft_sum_not1(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, - overwrite = self.overwrite, + overwrite=self.overwrite, ) single_point.dom_pft = [1, 5] single_point.pct_pft = [0.1, 0.5] @@ -326,7 +326,7 @@ def test_check_pct_pft_fraction_topct(self): uni_snow=self.uni_snow, cap_saturation=self.cap_saturation, out_dir=self.out_dir, - overwrite = self.overwrite, + overwrite=self.overwrite, ) single_point.dom_pft = [1, 5, 8] single_point.pct_pft = [0.5, 0.4, 0.1] From b4d4914daa7d5bc625d8eb1aea41ab0ab78d788c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 16:22:21 -0700 Subject: [PATCH 182/223] clean pylint --- 
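Reviewer note (text placed after the "---" separator is not applied by git am):
the hunk below only re-wraps one long string concatenation so that every line
stays within pylint's line-length limit. A minimal sketch of the style this
series eventually settles on (see PATCH 183 below), reusing the names already
in base_case.py:

    err_msg = (
        "File "
        + nc_fname
        + " already exists."
        + "\n Either remove the file or use "
        + "--overwrite to overwrite the existing files."
    )
    abort(err_msg)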
python/ctsm/site_and_regional/base_case.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 7f12771b67..7ea74ff6d9 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -217,6 +217,6 @@ def write_to_netcdf (self, xr_ds, nc_fname): xr_ds.to_netcdf(path=nc_fname, mode="w", format="NETCDF3_64BIT") else: err_msg = ("File "+nc_fname+ " already exists."+ - "\n Either remove the file or use --overwrite to overwrite the existing files.") + "\n Either remove the file or use "+ + "--overwrite to overwrite the existing files.") abort (err_msg) - From e77de84ed77e1891769bca580bb668ebff03eb24 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 16:24:05 -0700 Subject: [PATCH 183/223] clean up base_case.py through black --- python/ctsm/site_and_regional/base_case.py | 41 ++++++++++++++++------ 1 file changed, 30 insertions(+), 11 deletions(-) diff --git a/python/ctsm/site_and_regional/base_case.py b/python/ctsm/site_and_regional/base_case.py index 7ea74ff6d9..d0567f68aa 100644 --- a/python/ctsm/site_and_regional/base_case.py +++ b/python/ctsm/site_and_regional/base_case.py @@ -28,9 +28,10 @@ DatmFiles = namedtuple( "DatmFiles", "indir outdir fdomain_in dir_solar dir_prec dir_tpqw tag_solar tag_prec tag_tpqw name_solar " - "name_prec name_tpqw " + "name_prec name_tpqw ", ) + class BaseCase: """ Parent class to SinglePointCase and RegionalCase @@ -63,8 +64,15 @@ class BaseCase: write xarray dataset to netcdf """ - def __init__(self, create_domain, create_surfdata, create_landuse, create_datm, - create_user_mods, overwrite): + def __init__( + self, + create_domain, + create_surfdata, + create_landuse, + create_datm, + create_user_mods, + overwrite, + ): """ Initializes BaseCase with the given arguments. @@ -94,8 +102,15 @@ def __str__(self): """ Converts ingredients of the BaseCase to string for printing. """ - return "{}\n{}".format(str(self.__class__), "\n".join( - ("{} = {}".format(str(key), str(self.__dict__[key])) for key in sorted(self.__dict__)))) + return "{}\n{}".format( + str(self.__class__), + "\n".join( + ( + "{} = {}".format(str(key), str(self.__dict__[key])) + for key in sorted(self.__dict__) + ) + ), + ) @staticmethod def create_1d_coord(filename, lon_varname, lat_varname, x_dim, y_dim): @@ -169,7 +184,7 @@ def update_metadata(nc_file): nc_file.attrs["Created_on"] = today_string nc_file.attrs["Created_by"] = getuser() - nc_file.attrs["Created_with"] = './subset_data' + " -- " + sha + nc_file.attrs["Created_with"] = "./subset_data" + " -- " + sha # delete unrelated attributes if they exist del_attrs = [ @@ -197,7 +212,7 @@ def write_to_file(text, file_out): """ file_out.write("\n{}\n".format(text)) - def write_to_netcdf (self, xr_ds, nc_fname): + def write_to_netcdf(self, xr_ds, nc_fname): """ Writes a netcdf file if - the file does not exist. @@ -216,7 +231,11 @@ def write_to_netcdf (self, xr_ds, nc_fname): # mode 'w' overwrites file xr_ds.to_netcdf(path=nc_fname, mode="w", format="NETCDF3_64BIT") else: - err_msg = ("File "+nc_fname+ " already exists."+ - "\n Either remove the file or use "+ - "--overwrite to overwrite the existing files.") - abort (err_msg) + err_msg = ( + "File " + + nc_fname + + " already exists." + + "\n Either remove the file or use " + + "--overwrite to overwrite the existing files." 
+ ) + abort(err_msg) From 4134faa186fd5f005e7c38ff52f74e77a6d2bccd Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 16:24:53 -0700 Subject: [PATCH 184/223] clean up regional case through black --- .../ctsm/site_and_regional/regional_case.py | 57 +++++++++++-------- 1 file changed, 32 insertions(+), 25 deletions(-) diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index eeff6f9dd7..57bb8474f4 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -66,25 +66,31 @@ class RegionalCase(BaseCase): """ def __init__( - self, - lat1, - lat2, - lon1, - lon2, - reg_name, + self, + lat1, + lat2, + lon1, + lon2, + reg_name, + create_domain, + create_surfdata, + create_landuse, + create_datm, + create_user_mods, + out_dir, + overwrite, + ): + """ + Initializes RegionalCase with the given arguments. + """ + super().__init__( create_domain, create_surfdata, create_landuse, create_datm, create_user_mods, - out_dir, overwrite, - ): - """ - Initializes RegionalCase with the given arguments. - """ - super().__init__(create_domain, create_surfdata, create_landuse, create_datm, - create_user_mods, overwrite) + ) self.lat1 = lat1 self.lat2 = lat2 self.lon1 = lon1 @@ -102,8 +108,9 @@ def create_tag(self): if self.reg_name: self.tag = self.reg_name else: - self.tag = "{}-{}_{}-{}".format(str(self.lon1), str(self.lon2), str(self.lat1), - str(self.lat2)) + self.tag = "{}-{}_{}-{}".format( + str(self.lon1), str(self.lon2), str(self.lat1), str(self.lat2) + ) def create_domain_at_reg(self, indir, file): """ @@ -133,7 +140,7 @@ def create_domain_at_reg(self, indir, file): # mode 'w' overwrites file wfile = os.path.join(self.out_dir, fdomain_out) - self.write_to_netcdf (f_out, wfile) + self.write_to_netcdf(f_out, wfile) logger.info("Successfully created file (fdomain_out) %s", wfile) f_in.close() f_out.close() @@ -167,7 +174,7 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir): # mode 'w' overwrites file wfile = os.path.join(self.out_dir, fsurf_out) - self.write_to_netcdf (f_out, wfile) + self.write_to_netcdf(f_out, wfile) logger.info("created file (fsurf_out) %s", wfile) f_in.close() f_out.close() @@ -180,8 +187,8 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir): def create_landuse_at_reg(self, indir, file, user_mods_dir): """ - Create land use data file for this RegionalCase class. - """ + Create land use data file for this RegionalCase class. 
+ """ logger.info("Creating landuse file at region: %s", self.tag) @@ -192,9 +199,7 @@ def create_landuse_at_reg(self, indir, file, user_mods_dir): logger.info("fluse_out: %s", os.path.join(self.out_dir, fluse_out)) # create 1d coordinate variables to enable sel() method - f_in = self.create_1d_coord( - fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat" - ) + f_in = self.create_1d_coord(fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") lat = f_in["lat"] lon = f_in["lon"] @@ -209,13 +214,15 @@ def create_landuse_at_reg(self, indir, file, user_mods_dir): # mode 'w' overwrites file wfile = os.path.join(self.out_dir, fluse_out) - self.write_to_netcdf (f_out, wfile) + self.write_to_netcdf(f_out, wfile) logger.info("Successfully created file (fluse_out) %s", wfile) f_in.close() f_out.close() if self.create_user_mods: with open(os.path.join(user_mods_dir, "user_nl_clm"), "a") as nl_clm: - #line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, fluse_out)) - line = "flanduse_timeseries = '${}'".format(os.path.join(USRDAT_DIR, fluse_out)) + # line = "landuse = '${}'".format(os.path.join(USRDAT_DIR, fluse_out)) + line = "flanduse_timeseries = '${}'".format( + os.path.join(USRDAT_DIR, fluse_out) + ) self.write_to_file(line, nl_clm) From d4554b3df1a762a85c318f5d025d82ef58a9e93f Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 21:46:02 -0700 Subject: [PATCH 185/223] changing names of pandas dataset. --- .../site_and_regional/single_point_case.py | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 9baed758fc..1813a12496 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -393,33 +393,34 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): ) self.write_to_file(line, nl_clm) - def modify_surfdata_atpoint(self, f_tmp): + def modify_surfdata_atpoint(self, f_orig): """ Function to modify surface dataset based on the user flags chosen. """ + f_mod = f_orig.copy (deep= True) # -- modify surface data properties if self.dom_pft is not None: max_dom_pft = max(self.dom_pft) # -- First initialize everything: if max_dom_pft < NAT_PFT: - f_tmp["PCT_NAT_PFT"][:, :, :] = 0 + f_mod["PCT_NAT_PFT"][:, :, :] = 0 else: - f_tmp["PCT_CFT"][:, :, :] = 0 + f_mod["PCT_CFT"][:, :, :] = 0 # Do we need to initialize these here? 
# Because we set them in include_nonveg - # f_tmp["PCT_NATVEG"][:, :] = 0 - # f_tmp["PCT_CROP"][:, :] = 0 + # f_mod["PCT_NATVEG"][:, :] = 0 + # f_mod["PCT_CROP"][:, :] = 0 # -- loop over all dom_pft and pct_pft zip_pfts = zip(self.dom_pft, self.pct_pft) for dom_pft, pct_pft in zip_pfts: if dom_pft < NAT_PFT: - f_tmp["PCT_NAT_PFT"][:, :, dom_pft] = pct_pft + f_mod["PCT_NAT_PFT"][:, :, dom_pft] = pct_pft else: dom_pft = dom_pft - NAT_PFT - f_tmp["PCT_CFT"][:, :, dom_pft] = pct_pft + f_mod["PCT_CFT"][:, :, dom_pft] = pct_pft # ------------------------------- # By default include_nonveg=False @@ -428,18 +429,18 @@ def modify_surfdata_atpoint(self, f_tmp): if not self.include_nonveg: logger.info("Zeroing out non-vegetation land units in the surface data.") - f_tmp["PCT_LAKE"][:, :] = 0.0 - f_tmp["PCT_WETLAND"][:, :] = 0.0 - f_tmp["PCT_URBAN"][:, :] = 0.0 - f_tmp["PCT_GLACIER"][:, :] = 0.0 + f_mod["PCT_LAKE"][:, :] = 0.0 + f_mod["PCT_WETLAND"][:, :] = 0.0 + f_mod["PCT_URBAN"][:, :] = 0.0 + f_mod["PCT_GLACIER"][:, :] = 0.0 max_dom_pft = max(self.dom_pft) if max_dom_pft < NAT_PFT: - f_tmp["PCT_NATVEG"][:, :] = 100 - f_tmp["PCT_CROP"][:, :] = 0 + f_mod["PCT_NATVEG"][:, :] = 100 + f_mod["PCT_CROP"][:, :] = 0 else: - f_tmp["PCT_NATVEG"][:, :] = 0 - f_tmp["PCT_CROP"][:, :] = 100 + f_mod["PCT_NATVEG"][:, :] = 0 + f_mod["PCT_CROP"][:, :] = 100 else: logger.info( @@ -448,11 +449,11 @@ def modify_surfdata_atpoint(self, f_tmp): ) if self.uni_snow: - f_tmp["STD_ELEV"][:, :] = 20.0 + f_mod["STD_ELEV"][:, :] = 20.0 if self.cap_saturation: - f_tmp["FMAX"][:, :] = 0.0 + f_mod["FMAX"][:, :] = 0.0 - return f_tmp + return f_mod def create_surfdata_at_point(self, indir, file, user_mods_dir): """ From 4903f4f14b91a48650d27107eba65462716b450d Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 21:47:10 -0700 Subject: [PATCH 186/223] fixing pct_urban dims but it was working previously. --- python/ctsm/site_and_regional/single_point_case.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 1813a12496..c6b687fdfa 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -431,7 +431,7 @@ def modify_surfdata_atpoint(self, f_orig): logger.info("Zeroing out non-vegetation land units in the surface data.") f_mod["PCT_LAKE"][:, :] = 0.0 f_mod["PCT_WETLAND"][:, :] = 0.0 - f_mod["PCT_URBAN"][:, :] = 0.0 + f_mod["PCT_URBAN"][:, :, :] = 0.0 f_mod["PCT_GLACIER"][:, :] = 0.0 max_dom_pft = max(self.dom_pft) From 2d88dc249dcafd1cdc4ac81c1a460793b477048a Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 22:31:52 -0700 Subject: [PATCH 187/223] updating unit tests. 
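The new tests below pin down how modify_surfdata_atpoint() treats the
non-vegetation landunits: with include_nonveg=False the PCT_WETLAND, PCT_LAKE,
PCT_URBAN, and PCT_GLACIER fields are zeroed out, and with include_nonveg=True
they are left as-is. Each new test follows roughly this pattern (the
SinglePointCase constructor arguments are omitted here for brevity):

    single_point.include_nonveg = True
    single_point.dom_pft = [7]
    ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
    self.assertNotEqual(ds_out['PCT_WETLAND'].data[:, :], 0)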
--- .../test/test_unit_singlept_data_surfdata.py | 322 +++++++++++++++++- 1 file changed, 321 insertions(+), 1 deletion(-) diff --git a/python/ctsm/test/test_unit_singlept_data_surfdata.py b/python/ctsm/test/test_unit_singlept_data_surfdata.py index 1e3d7e9f76..300238951f 100755 --- a/python/ctsm/test/test_unit_singlept_data_surfdata.py +++ b/python/ctsm/test/test_unit_singlept_data_surfdata.py @@ -261,6 +261,64 @@ def test_modify_surfdata_atpoint_nocrop_1pft_glacier(self): self.assertEqual(ds_out["PCT_GLACIER"].data[:, :], 0) + def test_modify_surfdata_atpoint_nocrop_1pft_wetland(self): + """ + Test modify_surfdata_atpoint + Checks WETLAND for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.dom_pft = [5] + + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out["PCT_WETLAND"].data[:, :], 0) + + def test_modify_surfdata_atpoint_nocrop_1pft_lake(self): + """ + Test modify_surfdata_atpoint + Checks PCT_LAKE for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.dom_pft = [5] + + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out["PCT_LAKE"].data[:, :], 0) + def test_modify_surfdata_atpoint_nocrop_1pft_unisnow(self): """ Test modify_surfdata_atpoint @@ -357,6 +415,107 @@ def test_modify_surfdata_atpoint_nocrop_multipft(self): # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100) np.testing.assert_array_equal(ds_out["PCT_NAT_PFT"].data, expected_out) + def test_modify_surfdata_atpoint_nocrop_urban_nononveg(self): + """ + Test modify_surfdata_atpoint for crop cases + Checks URBAN for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = False + single_point.dom_pft = [7] + single_point.plat = [34.05] + single_point.plon = [118.25] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + expected_out = np.zeros((1, 1, 3)) + + # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100) + np.testing.assert_array_equal(ds_out["PCT_URBAN"].data, expected_out) + + def test_modify_surfdata_atpoint_nocrop_urban_include_nonveg(self): + """ + Test 
modify_surfdata_atpoint
+        Checks URBAN for one pft
+        """
+        single_point = SinglePointCase(
+            plat=self.plat,
+            plon=self.plon,
+            site_name=self.site_name,
+            create_domain=self.create_domain,
+            create_surfdata=self.create_surfdata,
+            create_landuse=self.create_landuse,
+            create_datm=self.create_datm,
+            create_user_mods=self.create_user_mods,
+            dom_pft=self.dom_pft,
+            pct_pft=self.pct_pft,
+            num_pft=self.num_pft,
+            include_nonveg=self.include_nonveg,
+            uni_snow=self.uni_snow,
+            cap_saturation=self.cap_saturation,
+            out_dir=self.out_dir,
+            overwrite=self.overwrite,
+        )
+        single_point.include_nonveg = True
+        single_point.dom_pft = [7]
+        single_point.plat = [34.05]
+        single_point.plon = [118.25]
+
+        # -- change it to something known
+        self.ds_test['PCT_URBAN'][:,:,:] = 1
+        ds_out = single_point.modify_surfdata_atpoint(self.ds_test)
+
+        expected_out = np.ones((1, 1, 3))
+
+        # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100)
+        np.testing.assert_array_equal(ds_out["PCT_URBAN"].data, expected_out)
+
+    def test_modify_surfdata_atpoint_nocrop_wetland_include_nonveg(self):
+        """
+        Test modify_surfdata_atpoint
+        Checks PCT_WETLAND for one pft to make sure it is not zeroed out
+        """
+        single_point = SinglePointCase(
+            plat=self.plat,
+            plon=self.plon,
+            site_name=self.site_name,
+            create_domain=self.create_domain,
+            create_surfdata=self.create_surfdata,
+            create_landuse=self.create_landuse,
+            create_datm=self.create_datm,
+
pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.dom_pft = [17] + + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertEqual(ds_out["PCT_LAKE"].data[:, :], 0) + + def test_modify_surfdata_atpoint_crop_1pft_unisnow(self): """ Test modify_surfdata_atpoint for crop cases Checks STD_ELV for one pft and unisnow @@ -615,6 +833,7 @@ def test_modify_surfdata_atpoint_nocrop_1pft_unisnow(self): self.assertEqual(ds_out["STD_ELEV"].data[:, :], 20) + def test_modify_surfdata_atpoint_crop_1pft_capsat(self): """ Test modify_surfdata_atpoint for crop cases @@ -679,6 +898,107 @@ def test_modify_surfdata_atpoint_crop_multipft(self): # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100) np.testing.assert_array_equal(ds_out["PCT_CFT"].data, expected_out) + def test_modify_surfdata_atpoint_crop_urban_nononveg(self): + """ + Test modify_surfdata_atpoint for crop cases + Checks URBAN for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = False + single_point.dom_pft = [17] + single_point.plat = [34.05] + single_point.plon = [118.25] + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + expected_out = np.zeros((1, 1, 3)) + + # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100) + np.testing.assert_array_equal(ds_out["PCT_URBAN"].data, expected_out) + + def test_modify_surfdata_atpoint_crop_urban_include_nonveg(self): + """ + Test modify_surfdata_atpoint for crop cases + Checks URBAN for one pft + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = True + single_point.dom_pft = [17] + single_point.plat = [34.05] + single_point.plon = [118.25] + + # -- change it to something known + self.ds_test['PCT_URBAN'][:,:,:] = 1 + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + expected_out = np.ones((1, 1, 3)) + + # self.assertEqual(ds_out['PCT_NAT_PFT'].data[:,:,5], 100) + np.testing.assert_array_equal(ds_out["PCT_URBAN"].data, expected_out) + + def test_modify_surfdata_atpoint_crop_lake_include_nonveg(self): + """ + Test modify_surfdata_atpoint for crop cases + Checks PCT_LAKE for one pft to make sure it is not zerod-out + """ + single_point = SinglePointCase( + plat=self.plat, + plon=self.plon, + site_name=self.site_name, + create_domain=self.create_domain, + create_surfdata=self.create_surfdata, + create_landuse=self.create_landuse, + create_datm=self.create_datm, + 
create_user_mods=self.create_user_mods, + dom_pft=self.dom_pft, + pct_pft=self.pct_pft, + num_pft=self.num_pft, + include_nonveg=self.include_nonveg, + uni_snow=self.uni_snow, + cap_saturation=self.cap_saturation, + out_dir=self.out_dir, + overwrite=self.overwrite, + ) + single_point.include_nonveg = True + single_point.dom_pft = [17] + + ds_out = single_point.modify_surfdata_atpoint(self.ds_test) + + self.assertNotEqual(ds_out['PCT_LAKE'].data[:,:], 0) + if __name__ == "__main__": unit_testing.setup_for_tests() From b4154f124d0f788829da73ab061c69f8a81e0af2 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 23:45:51 -0700 Subject: [PATCH 188/223] fewer lines for minor pylint complaints --- python/ctsm/test/test_unit_singlept_data_surfdata.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/python/ctsm/test/test_unit_singlept_data_surfdata.py b/python/ctsm/test/test_unit_singlept_data_surfdata.py index 300238951f..d744aa1bcc 100755 --- a/python/ctsm/test/test_unit_singlept_data_surfdata.py +++ b/python/ctsm/test/test_unit_singlept_data_surfdata.py @@ -164,7 +164,6 @@ def test_modify_surfdata_atpoint_nocrop_1pft_pctnatpft(self): out_dir=self.out_dir, overwrite=self.overwrite, ) - single_point.include_nonveg = False single_point.dom_pft = [5] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) @@ -197,7 +196,6 @@ def test_modify_surfdata_atpoint_nocrop_1pft_pctnatveg(self): out_dir=self.out_dir, overwrite=self.overwrite, ) - single_point.include_nonveg = False single_point.dom_pft = [5] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) @@ -226,7 +224,6 @@ def test_modify_surfdata_atpoint_nocrop_1pft_pctcrop(self): out_dir=self.out_dir, overwrite=self.overwrite, ) - single_point.include_nonveg = False single_point.dom_pft = [5] ds_out = single_point.modify_surfdata_atpoint(self.ds_test) @@ -516,12 +513,10 @@ def test_modify_surfdata_atpoint_nocrop_wetland_include_nonveg(self): self.assertNotEqual(ds_out['PCT_WETLAND'].data[:,:], 0) - class TestSinglePointCaseSurfaceCrop(unittest.TestCase): """ Basic class for testing creating and modifying surface dataset for crop cases (aka using 78 pft dataset) in SinglePointCase class in single_point_case.py. 
- """ plat = 20.1 From 6dbd29ed156379af46d7e7f2e361126479643a82 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 10 Feb 2022 23:53:57 -0700 Subject: [PATCH 189/223] blacking reformatter --- python/ctsm/test/test_unit_modify_fsurdat.py | 181 ++++++++++++------ .../test/test_unit_singlept_data_surfdata.py | 10 +- 2 files changed, 124 insertions(+), 67 deletions(-) diff --git a/python/ctsm/test/test_unit_modify_fsurdat.py b/python/ctsm/test/test_unit_modify_fsurdat.py index 4914924518..e3aa9320c7 100755 --- a/python/ctsm/test/test_unit_modify_fsurdat.py +++ b/python/ctsm/test/test_unit_modify_fsurdat.py @@ -19,9 +19,10 @@ # pylint: disable=protected-access + class TestModifyFsurdat(unittest.TestCase): """Tests the setvar_lev functions and the - _get_not_rectangle function + _get_not_rectangle function """ def test_setvarLev(self): @@ -36,7 +37,8 @@ def test_setvarLev(self): min_lon = 2 # expects min_lon < max_lon min_lat = 3 # expects min_lat < max_lat longxy, latixy, cols, rows = self._get_longxy_latixy( - _min_lon=min_lon, _max_lon=10, _min_lat=min_lat, _max_lat=12) + _min_lon=min_lon, _max_lon=10, _min_lat=min_lat, _max_lat=12 + ) # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 lon_1 = 3 @@ -47,39 +49,54 @@ def test_setvarLev(self): # create xarray dataset containing lev0, lev1, and lev2 variables; # the fsurdat_modify tool reads variables like this from fsurdat file var_1d = np.arange(cols) - var_lev2 = var_1d * np.ones((rows,cols,rows,cols)) - var_lev1 = var_1d * np.ones((cols,rows,cols)) - my_data = xr.Dataset(data_vars=dict( - LONGXY=(["x", "y"], longxy), # use LONGXY as var_lev0 - LATIXY=(["x", "y"], latixy), # __init__ expects LONGXY, LATIXY - var_lev1=(["w", "x", "y"], var_lev1), - var_lev2=(["v", "w", "x", "y"], var_lev2))) + var_lev2 = var_1d * np.ones((rows, cols, rows, cols)) + var_lev1 = var_1d * np.ones((cols, rows, cols)) + my_data = xr.Dataset( + data_vars=dict( + LONGXY=(["x", "y"], longxy), # use LONGXY as var_lev0 + LATIXY=(["x", "y"], latixy), # __init__ expects LONGXY, LATIXY + var_lev1=(["w", "x", "y"], var_lev1), + var_lev2=(["v", "w", "x", "y"], var_lev2), + ) + ) # create ModifyFsurdat object - modify_fsurdat = ModifyFsurdat(my_data=my_data, lon_1=lon_1, - lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, landmask_file=None) + modify_fsurdat = ModifyFsurdat( + my_data=my_data, + lon_1=lon_1, + lon_2=lon_2, + lat_1=lat_1, + lat_2=lat_2, + landmask_file=None, + ) # initialize and then modify the comparison matrices comp_lev0 = modify_fsurdat.file.LONGXY comp_lev1 = modify_fsurdat.file.var_lev1 comp_lev2 = modify_fsurdat.file.var_lev2 val_for_rectangle = 1.5 - comp_lev0[lat_1-min_lat:lat_2-min_lat+1, - lon_1-min_lon:lon_2-min_lon+1] = val_for_rectangle - comp_lev1[...,lat_1-min_lat:lat_2-min_lat+1, - lon_1-min_lon:lon_2-min_lon+1] = val_for_rectangle - comp_lev2[...,lat_1-min_lat:lat_2-min_lat+1, - lon_1-min_lon:lon_2-min_lon+1] = val_for_rectangle + comp_lev0[ + lat_1 - min_lat : lat_2 - min_lat + 1, lon_1 - min_lon : lon_2 - min_lon + 1 + ] = val_for_rectangle + comp_lev1[ + ..., + lat_1 - min_lat : lat_2 - min_lat + 1, + lon_1 - min_lon : lon_2 - min_lon + 1, + ] = val_for_rectangle + comp_lev2[ + ..., + lat_1 - min_lat : lat_2 - min_lat + 1, + lon_1 - min_lon : lon_2 - min_lon + 1, + ] = val_for_rectangle # test setvar - modify_fsurdat.setvar_lev0('LONGXY', val_for_rectangle) + modify_fsurdat.setvar_lev0("LONGXY", val_for_rectangle) np.testing.assert_array_equal(modify_fsurdat.file.LONGXY, comp_lev0) - modify_fsurdat.setvar_lev1('var_lev1', 
val_for_rectangle, cols-1) + modify_fsurdat.setvar_lev1("var_lev1", val_for_rectangle, cols - 1) np.testing.assert_array_equal(modify_fsurdat.file.var_lev1, comp_lev1) - modify_fsurdat.setvar_lev2('var_lev2', val_for_rectangle, cols-1, - rows-1) + modify_fsurdat.setvar_lev2("var_lev2", val_for_rectangle, cols - 1, rows - 1) np.testing.assert_array_equal(modify_fsurdat.file.var_lev2, comp_lev2) def test_getNotRectangle_lon1leLon2Lat1leLat2(self): @@ -96,7 +113,8 @@ def test_getNotRectangle_lon1leLon2Lat1leLat2(self): min_lon = 2 # expects min_lon < max_lon min_lat = 3 # expects min_lat < max_lat longxy, latixy, cols, rows = self._get_longxy_latixy( - _min_lon=min_lon, _max_lon=7, _min_lat=min_lat, _max_lat=8) + _min_lon=min_lon, _max_lon=7, _min_lat=min_lat, _max_lat=8 + ) # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 lon_1 = 3 @@ -104,16 +122,23 @@ def test_getNotRectangle_lon1leLon2Lat1leLat2(self): lat_1 = 6 lat_2 = 8 # lat_1 < lat_2 not_rectangle = ModifyFsurdat._get_not_rectangle( - lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, - longxy=longxy, latixy=latixy) - compare = np.ones((rows,cols)) + lon_1=lon_1, + lon_2=lon_2, + lat_1=lat_1, + lat_2=lat_2, + longxy=longxy, + latixy=latixy, + ) + compare = np.ones((rows, cols)) # assert this to confirm intuitive understanding of these matrices self.assertEqual(np.size(not_rectangle), np.size(compare)) # Hardwire where I expect not_rectangle to be False (0) # I have chosen the lon/lat ranges to match their corresponding index # values to keep this simple - compare[lat_1-min_lat:lat_2-min_lat+1, lon_1-min_lon:lon_2-min_lon+1] = 0 + compare[ + lat_1 - min_lat : lat_2 - min_lat + 1, lon_1 - min_lon : lon_2 - min_lon + 1 + ] = 0 np.testing.assert_array_equal(not_rectangle, compare) def test_getNotRectangle_lon1leLon2Lat1gtLat2(self): @@ -131,7 +156,8 @@ def test_getNotRectangle_lon1leLon2Lat1gtLat2(self): min_lon = -3 # expects min_lon < max_lon min_lat = -2 # expects min_lat < max_lat longxy, latixy, cols, rows = self._get_longxy_latixy( - _min_lon=min_lon, _max_lon=6, _min_lat=min_lat, _max_lat=5) + _min_lon=min_lon, _max_lon=6, _min_lat=min_lat, _max_lat=5 + ) # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 # I have chosen the lon/lat ranges to match their corresponding index @@ -141,17 +167,22 @@ def test_getNotRectangle_lon1leLon2Lat1gtLat2(self): lat_1 = 4 lat_2 = 0 # lat_1 > lat_2 not_rectangle = ModifyFsurdat._get_not_rectangle( - lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, - longxy=longxy, latixy=latixy) - compare = np.ones((rows,cols)) + lon_1=lon_1, + lon_2=lon_2, + lat_1=lat_1, + lat_2=lat_2, + longxy=longxy, + latixy=latixy, + ) + compare = np.ones((rows, cols)) # assert this to confirm intuitive understanding of these matrices self.assertEqual(np.size(not_rectangle), np.size(compare)) # Hardwire where I expect not_rectangle to be False (0) # I have chosen the lon/lat ranges to match their corresponding index # values to keep this simple - compare[:lat_2-min_lat+1, lon_1-min_lon:lon_2-min_lon+1] = 0 - compare[lat_1-min_lat:, lon_1-min_lon:lon_2-min_lon+1] = 0 + compare[: lat_2 - min_lat + 1, lon_1 - min_lon : lon_2 - min_lon + 1] = 0 + compare[lat_1 - min_lat :, lon_1 - min_lon : lon_2 - min_lon + 1] = 0 np.testing.assert_array_equal(not_rectangle, compare) def test_getNotRectangle_lon1gtLon2Lat1leLat2(self): @@ -169,7 +200,8 @@ def test_getNotRectangle_lon1gtLon2Lat1leLat2(self): min_lon = 1 # expects min_lon < max_lon min_lat = 1 # expects min_lat < max_lat longxy, latixy, cols, rows 
= self._get_longxy_latixy( - _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=90) + _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=90 + ) # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 # I have chosen the lon/lat ranges to match their corresponding index @@ -179,17 +211,22 @@ def test_getNotRectangle_lon1gtLon2Lat1leLat2(self): lat_1 = 2 lat_2 = 3 # lat_1 < lat_2 not_rectangle = ModifyFsurdat._get_not_rectangle( - lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, - longxy=longxy, latixy=latixy) - compare = np.ones((rows,cols)) + lon_1=lon_1, + lon_2=lon_2, + lat_1=lat_1, + lat_2=lat_2, + longxy=longxy, + latixy=latixy, + ) + compare = np.ones((rows, cols)) # assert this to confirm intuitive understanding of these matrices self.assertEqual(np.size(not_rectangle), np.size(compare)) # Hardwire where I expect not_rectangle to be False (0) # I have chosen the lon/lat ranges to match their corresponding index # values to keep this simple - compare[lat_1-min_lat:lat_2-min_lat+1, :lon_2-min_lon+1] = 0 - compare[lat_1-min_lat:lat_2-min_lat+1, lon_1-min_lon:] = 0 + compare[lat_1 - min_lat : lat_2 - min_lat + 1, : lon_2 - min_lon + 1] = 0 + compare[lat_1 - min_lat : lat_2 - min_lat + 1, lon_1 - min_lon :] = 0 np.testing.assert_array_equal(not_rectangle, compare) def test_getNotRectangle_lon1gtLon2Lat1gtLat2(self): @@ -207,7 +244,8 @@ def test_getNotRectangle_lon1gtLon2Lat1gtLat2(self): min_lon = -8 # expects min_lon < max_lon min_lat = -9 # expects min_lat < max_lat longxy, latixy, cols, rows = self._get_longxy_latixy( - _min_lon=min_lon, _max_lon=5, _min_lat=min_lat, _max_lat=6) + _min_lon=min_lon, _max_lon=5, _min_lat=min_lat, _max_lat=6 + ) # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 # I have chosen the lon/lat ranges to match their corresponding index @@ -217,19 +255,24 @@ def test_getNotRectangle_lon1gtLon2Lat1gtLat2(self): lat_1 = 0 lat_2 = -3 # lat_1 > lat_2 not_rectangle = ModifyFsurdat._get_not_rectangle( - lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, - longxy=longxy, latixy=latixy) - compare = np.ones((rows,cols)) + lon_1=lon_1, + lon_2=lon_2, + lat_1=lat_1, + lat_2=lat_2, + longxy=longxy, + latixy=latixy, + ) + compare = np.ones((rows, cols)) # assert this to confirm intuitive understanding of these matrices self.assertEqual(np.size(not_rectangle), np.size(compare)) # Hardwire where I expect not_rectangle to be False (0) # I have chosen the lon/lat ranges to match their corresponding index # values to keep this simple - compare[:lat_2-min_lat+1, :lon_2-min_lon+1] = 0 - compare[:lat_2-min_lat+1, lon_1-min_lon:] = 0 - compare[lat_1-min_lat:, :lon_2-min_lon+1] = 0 - compare[lat_1-min_lat:, lon_1-min_lon:] = 0 + compare[: lat_2 - min_lat + 1, : lon_2 - min_lon + 1] = 0 + compare[: lat_2 - min_lat + 1, lon_1 - min_lon :] = 0 + compare[lat_1 - min_lat :, : lon_2 - min_lon + 1] = 0 + compare[lat_1 - min_lat :, lon_1 - min_lon :] = 0 np.testing.assert_array_equal(not_rectangle, compare) def test_getNotRectangle_lonsStraddle0deg(self): @@ -247,7 +290,8 @@ def test_getNotRectangle_lonsStraddle0deg(self): min_lon = 0 # expects min_lon < max_lon min_lat = -5 # expects min_lat < max_lat longxy, latixy, cols, rows = self._get_longxy_latixy( - _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=5) + _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=5 + ) # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 # I have chosen the lon/lat ranges to match their corresponding index @@ -257,19 +301,24 @@ def 
test_getNotRectangle_lonsStraddle0deg(self): lat_1 = -4 lat_2 = -6 # lat_1 > lat_2 not_rectangle = ModifyFsurdat._get_not_rectangle( - lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, - longxy=longxy, latixy=latixy) - compare = np.ones((rows,cols)) + lon_1=lon_1, + lon_2=lon_2, + lat_1=lat_1, + lat_2=lat_2, + longxy=longxy, + latixy=latixy, + ) + compare = np.ones((rows, cols)) # assert this to confirm intuitive understanding of these matrices self.assertEqual(np.size(not_rectangle), np.size(compare)) # Hardwire where I expect not_rectangle to be False (0) # I have chosen the lon/lat ranges to match their corresponding index # values to keep this simple - compare[:lat_2-min_lat+1, :lon_2-min_lon+1] = 0 - compare[:lat_2-min_lat+1, lon_1-min_lon:] = 0 - compare[lat_1-min_lat:, :lon_2-min_lon+1] = 0 - compare[lat_1-min_lat:, lon_1-min_lon:] = 0 + compare[: lat_2 - min_lat + 1, : lon_2 - min_lon + 1] = 0 + compare[: lat_2 - min_lat + 1, lon_1 - min_lon :] = 0 + compare[lat_1 - min_lat :, : lon_2 - min_lon + 1] = 0 + compare[lat_1 - min_lat :, lon_1 - min_lon :] = 0 np.testing.assert_array_equal(not_rectangle, compare) def test_getNotRectangle_latsOutOfBounds(self): @@ -283,7 +332,8 @@ def test_getNotRectangle_latsOutOfBounds(self): min_lon = 0 # expects min_lon < max_lon min_lat = -5 # expects min_lat < max_lat longxy, latixy, _, _ = self._get_longxy_latixy( - _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=5) + _min_lon=min_lon, _max_lon=359, _min_lat=min_lat, _max_lat=5 + ) # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 # I have chosen the lon/lat ranges to match their corresponding index @@ -292,11 +342,17 @@ def test_getNotRectangle_latsOutOfBounds(self): lon_2 = 5 lat_1 = -91 lat_2 = 91 - with self.assertRaisesRegex(SystemExit, - "lat_1 and lat_2 need to be in the range -90 to 90"): + with self.assertRaisesRegex( + SystemExit, "lat_1 and lat_2 need to be in the range -90 to 90" + ): _ = ModifyFsurdat._get_not_rectangle( - lon_1=lon_1, lon_2=lon_2, lat_1=lat_1, lat_2=lat_2, - longxy=longxy, latixy=latixy) + lon_1=lon_1, + lon_2=lon_2, + lat_1=lat_1, + lat_2=lat_2, + longxy=longxy, + latixy=latixy, + ) def _get_longxy_latixy(self, _min_lon, _max_lon, _min_lat, _max_lat): """ @@ -307,7 +363,7 @@ def _get_longxy_latixy(self, _min_lon, _max_lon, _min_lat, _max_lat): long = np.arange(_min_lon, _max_lon + 1) long = [lon_range_0_to_360(longitude) for longitude in long] - longxy = long * np.ones((rows,cols)) + longxy = long * np.ones((rows, cols)) compare = np.repeat([long], rows, axis=0) # alternative way to form # assert this to confirm intuitive understanding of these matrices np.testing.assert_array_equal(longxy, compare) @@ -315,7 +371,7 @@ def _get_longxy_latixy(self, _min_lon, _max_lon, _min_lat, _max_lat): lati = np.arange(_min_lat, _max_lat + 1) self.assertEqual(min(lati), _min_lat) self.assertEqual(max(lati), _max_lat) - latixy_transp = lati * np.ones((cols,rows)) + latixy_transp = lati * np.ones((cols, rows)) compare = np.repeat([lati], cols, axis=0) # alternative way to form # assert this to confirm intuitive understanding of these matrices np.testing.assert_array_equal(latixy_transp, compare) @@ -323,6 +379,7 @@ def _get_longxy_latixy(self, _min_lon, _max_lon, _min_lat, _max_lat): return longxy, latixy, cols, rows -if __name__ == '__main__': + +if __name__ == "__main__": unit_testing.setup_for_tests() unittest.main() diff --git a/python/ctsm/test/test_unit_singlept_data_surfdata.py b/python/ctsm/test/test_unit_singlept_data_surfdata.py index 
d744aa1bcc..c2392e766e 100755 --- a/python/ctsm/test/test_unit_singlept_data_surfdata.py +++ b/python/ctsm/test/test_unit_singlept_data_surfdata.py @@ -475,7 +475,7 @@ def test_modify_surfdata_atpoint_nocrop_urban_include_nonveg(self): single_point.plon = [118.25] # -- change it to something known - self.ds_test['PCT_URBAN'][:,:,:] = 1 + self.ds_test["PCT_URBAN"][:, :, :] = 1 ds_out = single_point.modify_surfdata_atpoint(self.ds_test) expected_out = np.ones((1, 1, 3)) @@ -511,7 +511,8 @@ def test_modify_surfdata_atpoint_nocrop_wetland_include_nonveg(self): ds_out = single_point.modify_surfdata_atpoint(self.ds_test) - self.assertNotEqual(ds_out['PCT_WETLAND'].data[:,:], 0) + self.assertNotEqual(ds_out["PCT_WETLAND"].data[:, :], 0) + class TestSinglePointCaseSurfaceCrop(unittest.TestCase): """ @@ -828,7 +829,6 @@ def test_modify_surfdata_atpoint_crop_1pft_unisnow(self): self.assertEqual(ds_out["STD_ELEV"].data[:, :], 20) - def test_modify_surfdata_atpoint_crop_1pft_capsat(self): """ Test modify_surfdata_atpoint for crop cases @@ -956,7 +956,7 @@ def test_modify_surfdata_atpoint_crop_urban_include_nonveg(self): single_point.plon = [118.25] # -- change it to something known - self.ds_test['PCT_URBAN'][:,:,:] = 1 + self.ds_test["PCT_URBAN"][:, :, :] = 1 ds_out = single_point.modify_surfdata_atpoint(self.ds_test) expected_out = np.ones((1, 1, 3)) @@ -992,7 +992,7 @@ def test_modify_surfdata_atpoint_crop_lake_include_nonveg(self): ds_out = single_point.modify_surfdata_atpoint(self.ds_test) - self.assertNotEqual(ds_out['PCT_LAKE'].data[:,:], 0) + self.assertNotEqual(ds_out["PCT_LAKE"].data[:, :], 0) if __name__ == "__main__": From e2218dc17967ba63cd60ed65bafd677e74c05036 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Fri, 11 Feb 2022 00:01:24 -0700 Subject: [PATCH 190/223] running this through black --- python/ctsm/site_and_regional/single_point_case.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index c6b687fdfa..f290142272 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -397,7 +397,7 @@ def modify_surfdata_atpoint(self, f_orig): """ Function to modify surface dataset based on the user flags chosen. 
""" - f_mod = f_orig.copy (deep= True) + f_mod = f_orig.copy(deep=True) # -- modify surface data properties if self.dom_pft is not None: From e61fbcc5991be723c59ec63739664129bd3bb523 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 15 Feb 2022 09:16:01 -0700 Subject: [PATCH 191/223] Add KONA, US-UMB, and regional tests for subset_data --- test/tools/README.testnames | 5 +++-- test/tools/input_tests_master | 4 ++++ test/tools/nl_files/subset_data_KONA | 1 + test/tools/nl_files/subset_data_US-UMB | 1 + test/tools/nl_files/subset_data_YELL | 2 +- test/tools/nl_files/subset_data_f09_US_pt | 1 + 6 files changed, 11 insertions(+), 3 deletions(-) create mode 100644 test/tools/nl_files/subset_data_KONA create mode 100644 test/tools/nl_files/subset_data_US-UMB create mode 100644 test/tools/nl_files/subset_data_f09_US_pt diff --git a/test/tools/README.testnames b/test/tools/README.testnames index 58222d9333..21563752f3 100644 --- a/test/tools/README.testnames +++ b/test/tools/README.testnames @@ -42,8 +42,9 @@ m is the resolution 8 -- US-UMB 9 -- 4x5 a -- NEON YELL -c -- US-UMB with cycling on forcing and transient use-case -g -- US-UMB with global forcing and grid PFT and soil +b -- NEON KONA +c -- unused +g -- unused y -- 1.9x2.5 with transient 1850-2100 for rcp=2.6 and glacier-MEC on T -- 1x1_numaIA Z -- 10x15 with crop on diff --git a/test/tools/input_tests_master b/test/tools/input_tests_master index 257bef8d0a..0b281c4d9c 100644 --- a/test/tools/input_tests_master +++ b/test/tools/input_tests_master @@ -38,6 +38,10 @@ bl0a1 TBLscript_tools.sh site_and_regional run_neon.py run_neon_OSBS smba1 TSMscript_tools.sh site_and_regional subset_data subset_data_YELL blba1 TBLscript_tools.sh site_and_regional subset_data subset_data_YELL +smbb1 TSMscript_tools.sh site_and_regional subset_data subset_data_KONA +blbb1 TBLscript_tools.sh site_and_regional subset_data subset_data_KONA +smb81 TSMscript_tools.sh site_and_regional subset_data subset_data_US-UMB +blb81 TBLscript_tools.sh site_and_regional subset_data subset_data_US-UMB smaa2 TSMscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL blaa2 TBLscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL diff --git a/test/tools/nl_files/subset_data_KONA b/test/tools/nl_files/subset_data_KONA new file mode 100644 index 0000000000..ffdd7c1346 --- /dev/null +++ b/test/tools/nl_files/subset_data_KONA @@ -0,0 +1 @@ +point --lon 263.38956 --lat 39.1082 --site KONA --dompft 17,19,23,45 --pctpft 28,12,32,28 --crop --create-domain --create-surface --outdir EXEDIR/KONA_user-mod_and_data --user-mods-dir EXEDIR/KONA_user-mod_and_data --verbose diff --git a/test/tools/nl_files/subset_data_US-UMB b/test/tools/nl_files/subset_data_US-UMB new file mode 100644 index 0000000000..499b5f53fd --- /dev/null +++ b/test/tools/nl_files/subset_data_US-UMB @@ -0,0 +1 @@ +point --lon 275.28626 --lat 45.5598 --site 1x1_US-UMB --dompft 7 --cap-saturation --uniform-snowpack --create-surface --outdir EXEDIR/US-UMB_user-mod_and_data --user-mods-dir EXEDIR/US-UMB_user-mod_and_data --verbose diff --git a/test/tools/nl_files/subset_data_YELL b/test/tools/nl_files/subset_data_YELL index 99ffcbaaed..5e142713df 100644 --- a/test/tools/nl_files/subset_data_YELL +++ b/test/tools/nl_files/subset_data_YELL @@ -1 +1 @@ ---verbose point --lon 250.45804 --lat 44.95597 --site YELL --dompft 1 --crop --create-domain --create-datm --datm-syr 2000 --create-surface --datm-eyr 2000 --outdir EXEDIR/YELL_user-mod_and_data --user-mods-dir 
EXEDIR/YELL_user-mod_and_data +point --lon 250.45804 --lat 44.95597 --site YELL --dompft 1 --crop --create-domain --create-surface --outdir EXEDIR/YELL_user-mod_and_data --user-mods-dir EXEDIR/YELL_user-mod_and_data --verbose diff --git a/test/tools/nl_files/subset_data_f09_US_pt b/test/tools/nl_files/subset_data_f09_US_pt new file mode 100644 index 0000000000..4acdfeabd4 --- /dev/null +++ b/test/tools/nl_files/subset_data_f09_US_pt @@ -0,0 +1 @@ +point --lon 257.5 --lat 43.822 --site 1x1_ --include-nonveg --crop --create-landuse --create-datm --create-user-mods --datm-syr 2000 --datm-eyr 2000 --create-surface --outdir EXEDIR/f09_US_pt_user-mod_and_data --user-mods-dir EXEDIR/f09_US_pt_user-mod_and_data --verbose From 88863277a232ead12fb32eca8275d9f1ed08aa16 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 15 Feb 2022 16:48:50 -0700 Subject: [PATCH 192/223] Back out cdeps version because of cdeps issue ESCOMP/CDEPS#144 --- Externals.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Externals.cfg b/Externals.cfg index 21b6eb6bbb..f596a4bd4f 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -55,7 +55,7 @@ local_path = components/cmeps required = True [cdeps] -tag = cdeps0.12.37 +tag = cdeps0.12.35 protocol = git repo_url = https://github.com/ESCOMP/CDEPS.git local_path = components/cdeps From c31b584001bdcf9accad836117ee95e8823a533e Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 16 Feb 2022 15:32:15 -0700 Subject: [PATCH 193/223] Start on change files --- doc/ChangeLog | 122 ++++++++++++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 2 files changed, 123 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index a93c31092f..fe641d75f8 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,126 @@ =============================================================== +Tag name: ctsm5.1.dev075 +Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326) +Date: Wed Feb 16 15:09:41 MST 2022 +One-line Summary: Small answer changes: urban ventilation, fire-emission, irrigate off when not crop, fix two SSP ndep files + +Purpose and description of changes +---------------------------------- + +The effects of ventilation (exchange of building air with canopy air) are accounted for in the energy budget inside the building. +The effects on urban canopy air are not. This remedies that. + +This fixes an issue with how coefficients are applied to individual fire components. + +SSP370 ndep file update. Fix SSP245 ndep file. Turn irrigation off for crop off + +ccs_config_cesm + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] 
+ +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed or introduced +------------------------ +Issues fixed (include CTSM Issue #): +#1526 +#509 +#1578 +#1631 + +Notes of particular relevance for users +--------------------------------------- + +Caveats for users (e.g., need to interpolate initial conditions): + +Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables): + +Changes made to namelist defaults (e.g., changed parameter values): + +Changes to the datasets (e.g., parameter, surface or initial files): + +Notes of particular relevance for developers: +--------------------------------------------- + +Caveats for developers (e.g., code that is duplicated that requires double maintenance): + +Changes to tests or testing: + + +Testing summary: regular +---------------- + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] + + build-namelist tests (if CLMBuildNamelist.pm has changed): + + cheyenne - PASS + + tools-tests (test/tools) (if tools have been changed): + + cheyenne - + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + cheyenne - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- PASS + izumi ------- OK + +If the tag used for baseline comparisons was NOT the previous tag, note that here: previous + + +Answer changes +-------------- + +Changes answers relative to baseline: Yes + + Summarize any changes to answers, i.e., + - what code configurations: Most + - what platforms/compilers: All + - nature of change: Similar climate + + If this tag changes climate describe the run(s) done to evaluate the new + climate (put details of the simulations in the experiment database) + - casename: oleson/ctsm51_ctsm51d61_1deg_GSWP3V1_CON_VENT_2000 + + URL for LMWG diagnostics output used to validate new climate: + https://webext.cgd.ucar.edu/I2000/ctsm51_ctsm51d61_1deg_GSWP3V1_CON_VENT_2000/lnd/ctsm51_ctsm51d61_1deg_GSWP3V1_CON_VENT_2000.1991_2010-ctsm51_ctsm51d61_1deg_GSWP3V1_CON_2000.1991_2010/set2/set2.html + + +Other details +------------- + +List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): + cism to cismwrap_2_1_95 + cime to cime6.0.13 + cmeps to cmeps0.13.47 + cdeps to cdeps0.12.35 + cpl7 to cpl7.0.12 + pio to pio2_5_5 + +Pull Requests that document the changes (include PR ids): +(https://github.com/ESCOMP/ctsm/pull) + #1535 -- Ventilation flux to urban canyon + #1532 -- Fire emissions bug fix + +=============================================================== +=============================================================== Tag name: ctsm5.1.dev074 Originator(s): slevis (Samuel Levis,SLevis Consulting,303-665-1310) Date: Wed Feb 2 00:44:27 MST 2022 diff --git a/doc/ChangeSum b/doc/ChangeSum index 93e87f9495..26b3c9f754 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.1.dev075 erik 02/16/2022 Small answer changes: urban ventilation, fire-emission, irrigate off when not crop, fix two SSP ndep files ctsm5.1.dev074 slevis 02/02/2022 Introduce vert. resolved MIMICS as new method to solve below ground decomp. 
ctsm5.1.dev073 sacks 01/25/2022 Some fixes for Gregorian calendar ctsm5.1.dev072 negins 01/17/2022 mksurfdat toolchain part 1: gen_mksurf_namelist From 91513107d1a023066c4b9899ce73b60b3cda251d Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 16 Feb 2022 21:36:06 -0700 Subject: [PATCH 194/223] Update change files --- doc/ChangeLog | 51 +++++++++++++++++++++++++++------------------------ 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/doc/ChangeLog b/doc/ChangeLog index fe641d75f8..9f5b185ff6 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,20 +1,22 @@ =============================================================== Tag name: ctsm5.1.dev075 Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326) -Date: Wed Feb 16 15:09:41 MST 2022 +Date: Wed Feb 16 21:35:40 MST 2022 One-line Summary: Small answer changes: urban ventilation, fire-emission, irrigate off when not crop, fix two SSP ndep files Purpose and description of changes ---------------------------------- -The effects of ventilation (exchange of building air with canopy air) are accounted for in the energy budget inside the building. -The effects on urban canopy air are not. This remedies that. +The effects of ventilation (exchange of building air with canopy air) are accounted for in the +energy budget inside the building. The effects on urban canopy air are not. This remedies that. -This fixes an issue with how coefficients are applied to individual fire components. +This fixes an issue with how coefficients are applied to individual fire components. Allow units +to be in molecules/m2/sec as well as kg/m2/sec. -SSP370 ndep file update. Fix SSP245 ndep file. Turn irrigation off for crop off +SSP3-7.0 Nitrogen-deposition file update. Fix SSP2-4.5 Nitrogen-deposition file. Turn irrigation +off by default except for future scenarios. -ccs_config_cesm +Add new external ccs_config_cesm which has the CESM cime configuration files. Significant changes to scientifically-supported configurations -------------------------------------------------------------- @@ -24,11 +26,11 @@ Does this tag change answers significantly for any of the following physics conf [Put an [X] in the box for any configuration with significant answer changes.] -[ ] clm5_1 +[x] clm5_1 -[ ] clm5_0 +[x] clm5_0 -[ ] ctsm5_0-nwp +[x] ctsm5_0-nwp [ ] clm4_5 @@ -36,29 +38,33 @@ Does this tag change answers significantly for any of the following physics conf Bugs fixed or introduced ------------------------ Issues fixed (include CTSM Issue #): -#1526 -#509 -#1578 -#1631 + Fixes #1526 -- Switch FATES methane test for methane off + Fixes #509 --- irrigate in 1850 is off for runs with use_crop but on for those without + Fixes #1578 -- Need to replace SSP370 ndep file with new one + Fixes #1631 -- ndep file for SSP2-4.5 is incorrect + +Known bugs introduced in this tag (include issue #): + #1653 -- Use secsphr in UrbBuildTempOleson2015Mod.F90 Notes of particular relevance for users --------------------------------------- Caveats for users (e.g., need to interpolate initial conditions): - -Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables): + using older restart files will start ventilation flux at zero. 
+ default fsurdat selection can also match irrigate true surface datasets Changes made to namelist defaults (e.g., changed parameter values): + irrigate is now default off, except for future scenarios Changes to the datasets (e.g., parameter, surface or initial files): + New Nitrogen-deposition file for SSP3-7.0 + Correct Nitrogen-deposition file for SSP2-4.5 Notes of particular relevance for developers: --------------------------------------------- -Caveats for developers (e.g., code that is duplicated that requires double maintenance): - Changes to tests or testing: - + FATES methane on test switched to methane off, since methane on is the default Testing summary: regular ---------------- @@ -67,11 +73,7 @@ Testing summary: regular build-namelist tests (if CLMBuildNamelist.pm has changed): - cheyenne - PASS - - tools-tests (test/tools) (if tools have been changed): - - cheyenne - + cheyenne - PASS (345 tests are different than baseline) python testing (if python code has changed; see instructions in python/README.md; document testing done): @@ -91,7 +93,8 @@ Answer changes Changes answers relative to baseline: Yes Summarize any changes to answers, i.e., - - what code configurations: Most + - what code configurations: clm5_0/clm5_1 urban ventilation flux, + irrigate off now, SSP3-7.0/SSP2-4.5 new ndep file, fire-emissions different - what platforms/compilers: All - nature of change: Similar climate From 4ae0dd980b3c365a10ce7cacf1a538ec2ee8741a Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Thu, 17 Feb 2022 12:34:42 -0700 Subject: [PATCH 195/223] Add test for a point from the global f09 grid for subset_data --- test/tools/README.testnames | 2 +- test/tools/input_tests_master | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/test/tools/README.testnames b/test/tools/README.testnames index 21563752f3..18ad04f4de 100644 --- a/test/tools/README.testnames +++ b/test/tools/README.testnames @@ -43,7 +43,7 @@ m is the resolution 9 -- 4x5 a -- NEON YELL b -- NEON KONA -c -- unused +c -- single point from the 0.9x1.25 grid g -- unused y -- 1.9x2.5 with transient 1850-2100 for rcp=2.6 and glacier-MEC on T -- 1x1_numaIA diff --git a/test/tools/input_tests_master b/test/tools/input_tests_master index 0b281c4d9c..2a64d80640 100644 --- a/test/tools/input_tests_master +++ b/test/tools/input_tests_master @@ -42,6 +42,8 @@ smbb1 TSMscript_tools.sh site_and_regional subset_data subset_data_KONA blbb1 TBLscript_tools.sh site_and_regional subset_data subset_data_KONA smb81 TSMscript_tools.sh site_and_regional subset_data subset_data_US-UMB blb81 TBLscript_tools.sh site_and_regional subset_data subset_data_US-UMB +smbc1 TSMscript_tools.sh site_and_regional subset_data subset_data_f09_US_pt +blbc1 TBLscript_tools.sh site_and_regional subset_data subset_data_f09_US_pt smaa2 TSMscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL blaa2 TBLscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL From 5902cd3d77ba3f0f18554a87915cf6de7b8acefd Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Thu, 17 Feb 2022 12:45:56 -0700 Subject: [PATCH 196/223] Changes to get modify_data_YELL and subset_data_KONA tests to pass, had to make a change in modify_singlept_site_neon.py --- test/tools/nl_files/subset_data_KONA | 2 +- tools/site_and_regional/modify_singlept_site_neon.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/tools/nl_files/subset_data_KONA b/test/tools/nl_files/subset_data_KONA index ffdd7c1346..cb743f2b45 100644 --- 
a/test/tools/nl_files/subset_data_KONA +++ b/test/tools/nl_files/subset_data_KONA @@ -1 +1 @@ -point --lon 263.38956 --lat 39.1082 --site KONA --dompft 17,19,23,45 --pctpft 28,12,32,28 --crop --create-domain --create-surface --outdir EXEDIR/KONA_user-mod_and_data --user-mods-dir EXEDIR/KONA_user-mod_and_data --verbose +point --lon 263.38956 --lat 39.1082 --site KONA --dompft 17 19 23 45 --pctpft 28 12 32 28 --crop --create-domain --create-surface --outdir EXEDIR/KONA_user-mod_and_data --user-mods-dir EXEDIR/KONA_user-mod_and_data --verbose diff --git a/tools/site_and_regional/modify_singlept_site_neon.py b/tools/site_and_regional/modify_singlept_site_neon.py index 924bb7cccc..e271dcaf60 100755 --- a/tools/site_and_regional/modify_singlept_site_neon.py +++ b/tools/site_and_regional/modify_singlept_site_neon.py @@ -251,7 +251,7 @@ def find_surffile(surf_dir, site_name): """ # sf_name = "surfdata_hist_16pfts_Irrig_CMIP6_simyr2000_"+site_name+"*.nc" - sf_name = "surfdata_*_hist_78pfts_CMIP6_simyr2000_" + site_name + "*.nc" + sf_name = "surfdata_hist_78pfts_CMIP6_simyr2000_" + site_name + "*.nc" print (os.path.join(surf_dir , sf_name)) surf_file = sorted(glob.glob(os.path.join(surf_dir , sf_name))) From 985b00ed0e51dac5b9f0b9d3573cd31f183be890 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 17 Feb 2022 14:11:14 -0700 Subject: [PATCH 197/223] adding test for region1 --- test/tools/README.testnames | 1 + test/tools/input_tests_master | 1 + test/tools/nl_files/subset_data_region1 | 1 + test/tools/tests_pretag_cheyenne_nompi | 1 + 4 files changed, 4 insertions(+) create mode 100644 test/tools/nl_files/subset_data_region1 diff --git a/test/tools/README.testnames b/test/tools/README.testnames index 18ad04f4de..11d9e23d4c 100644 --- a/test/tools/README.testnames +++ b/test/tools/README.testnames @@ -43,6 +43,7 @@ m is the resolution 9 -- 4x5 a -- NEON YELL b -- NEON KONA +d -- region1 c -- single point from the 0.9x1.25 grid g -- unused y -- 1.9x2.5 with transient 1850-2100 for rcp=2.6 and glacier-MEC on diff --git a/test/tools/input_tests_master b/test/tools/input_tests_master index 2a64d80640..d06b493eb2 100644 --- a/test/tools/input_tests_master +++ b/test/tools/input_tests_master @@ -44,6 +44,7 @@ smb81 TSMscript_tools.sh site_and_regional subset_data subset_data_US-UMB blb81 TBLscript_tools.sh site_and_regional subset_data subset_data_US-UMB smbc1 TSMscript_tools.sh site_and_regional subset_data subset_data_f09_US_pt blbc1 TBLscript_tools.sh site_and_regional subset_data subset_data_f09_US_pt +smbd1 TBLscript_tools.sh site_and_regional subset_data subset_data_region1 smaa2 TSMscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL blaa2 TBLscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL diff --git a/test/tools/nl_files/subset_data_region1 b/test/tools/nl_files/subset_data_region1 new file mode 100644 index 0000000000..c1c5607239 --- /dev/null +++ b/test/tools/nl_files/subset_data_region1 @@ -0,0 +1 @@ +region --lat1 -40 --lat2 15 --lon1 275 --lon2 330 --create-domain --create-surface --create-landuse --verbose --overwrite --reg test1 diff --git a/test/tools/tests_pretag_cheyenne_nompi b/test/tools/tests_pretag_cheyenne_nompi index 8075eab50b..b36cbace1f 100644 --- a/test/tools/tests_pretag_cheyenne_nompi +++ b/test/tools/tests_pretag_cheyenne_nompi @@ -4,6 +4,7 @@ smg54 blg54 sm0a1 bl0a1 smaa2 blaa2 smba1 blba1 +smbd1 blbd1 smi04 bli04 smi24 bli24 smi53 bli53 From 2850dfb158409c27dd81e6b04d031bbd800f3fe5 Mon Sep 17 00:00:00 2001 
From: Negin Sobhani Date: Thu, 17 Feb 2022 14:12:26 -0700 Subject: [PATCH 198/223] modifying both versions of files from subset_data. --- tools/site_and_regional/modify_singlept_site_neon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/site_and_regional/modify_singlept_site_neon.py b/tools/site_and_regional/modify_singlept_site_neon.py index e271dcaf60..1928653dcf 100755 --- a/tools/site_and_regional/modify_singlept_site_neon.py +++ b/tools/site_and_regional/modify_singlept_site_neon.py @@ -251,7 +251,7 @@ def find_surffile(surf_dir, site_name): """ # sf_name = "surfdata_hist_16pfts_Irrig_CMIP6_simyr2000_"+site_name+"*.nc" - sf_name = "surfdata_hist_78pfts_CMIP6_simyr2000_" + site_name + "*.nc" + sf_name = "surfdata_*hist_78pfts_CMIP6_simyr2000_" + site_name + "*.nc" print (os.path.join(surf_dir , sf_name)) surf_file = sorted(glob.glob(os.path.join(surf_dir , sf_name))) From ef8a9d97aa5bcb55ac4cf70e9668d143d6c4580c Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 17 Feb 2022 14:17:29 -0700 Subject: [PATCH 199/223] updating the surf_wrapper to point to local directory instead of scratch --- tools/site_and_regional/neon_surf_wrapper.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/site_and_regional/neon_surf_wrapper.py b/tools/site_and_regional/neon_surf_wrapper.py index 3c58b1cd51..df58d3ab36 100755 --- a/tools/site_and_regional/neon_surf_wrapper.py +++ b/tools/site_and_regional/neon_surf_wrapper.py @@ -101,7 +101,8 @@ def main(): '--create-surface','--uniform-snowpack','--cap-saturation','--verbose'] execute(command) - command = ['./modify_singlept_site_neon.py','--neon_site',site] + command = ['./modify_singlept_site_neon.py','--neon_site',site, '--surf_dir', + 'subset_data_single_point'] execute(command) if __name__ == "__main__": From ade3853507bb17956da57eec7944ceaf905e1338 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 17 Feb 2022 16:53:20 -0700 Subject: [PATCH 200/223] fixing the test lists --- test/tools/input_tests_master | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/tools/input_tests_master b/test/tools/input_tests_master index d06b493eb2..f3e46d50b5 100644 --- a/test/tools/input_tests_master +++ b/test/tools/input_tests_master @@ -44,7 +44,8 @@ smb81 TSMscript_tools.sh site_and_regional subset_data subset_data_US-UMB blb81 TBLscript_tools.sh site_and_regional subset_data subset_data_US-UMB smbc1 TSMscript_tools.sh site_and_regional subset_data subset_data_f09_US_pt blbc1 TBLscript_tools.sh site_and_regional subset_data subset_data_f09_US_pt -smbd1 TBLscript_tools.sh site_and_regional subset_data subset_data_region1 +smbd1 TSMscript_tools.sh site_and_regional subset_data subset_data_region1 +blbd1 TBLscript_tools.sh site_and_regional subset_data subset_data_region1 smaa2 TSMscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL blaa2 TBLscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL From 55cbf35e2d463dc931be862a8a64fd89853d3ef3 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 17 Feb 2022 16:56:05 -0700 Subject: [PATCH 201/223] new tests added --- test/tools/tests_pretag_cheyenne_nompi | 2 -- test/tools/tests_pretag_cheyenne_nompi_neon | 3 +++ 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 test/tools/tests_pretag_cheyenne_nompi_neon diff --git a/test/tools/tests_pretag_cheyenne_nompi b/test/tools/tests_pretag_cheyenne_nompi index b36cbace1f..19e96594bf 100644 --- 
a/test/tools/tests_pretag_cheyenne_nompi
+++ b/test/tools/tests_pretag_cheyenne_nompi
@@ -1,8 +1,6 @@
 smi79 bli79
 smc#4 blc#4
 smg54 blg54
-sm0a1 bl0a1
-smaa2 blaa2
 smba1 blba1
 smbd1 blbd1
 smi04 bli04
diff --git a/test/tools/tests_pretag_cheyenne_nompi_neon b/test/tools/tests_pretag_cheyenne_nompi_neon
new file mode 100644
index 0000000000..1f72cadb53
--- /dev/null
+++ b/test/tools/tests_pretag_cheyenne_nompi_neon
@@ -0,0 +1,3 @@
+sm0a1 bl0a1
+smaa2 blaa2
+

From ba08c135fff588678103921eee78a01bb8f9e720 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Thu, 17 Feb 2022 16:57:43 -0700
Subject: [PATCH 202/223] updating README for neon site

---
 test/tools/README | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/test/tools/README b/test/tools/README
index cb5dcdec34..ed41e0fb96 100644
--- a/test/tools/README
+++ b/test/tools/README
@@ -29,6 +29,10 @@ release tests
 qcmd -l walltime=10:00:00 -- env CLM_INPUT_TESTS=`pwd`/tests_posttag_nompi_regression \
 ./test_driver.sh -i >& run.out &
 
+To run neon-specific tests, please use login nodes:
+env CLM_INPUT_TESTS=`pwd`/tests_pretag_cheyenne_nompi_neon ./test_driver.sh -i >& run.out &
+
+
 Intended for use on NCAR machines cheyenne, geyser (DAV) and hobart.
 
 II. RUNNING test_driver.sh TOOLS TESTING:

From e532b2b998be46328c4c9e7a2f07d2bf931c1b17 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Thu, 17 Feb 2022 17:05:17 -0700
Subject: [PATCH 203/223] updating README file list names

---
 test/tools/README | 2 +-
 ...tests_pretag_cheyenne_nompi_neon => tests_pretag_nompi_neon} | 0
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename test/tools/{tests_pretag_cheyenne_nompi_neon => tests_pretag_nompi_neon} (100%)

diff --git a/test/tools/README b/test/tools/README
index ed41e0fb96..ed96fb4670 100644
--- a/test/tools/README
+++ b/test/tools/README
@@ -30,7 +30,7 @@ qcmd -l walltime=10:00:00 -- env CLM_INPUT_TESTS=`pwd`/tests_posttag_nompi_regre
 ./test_driver.sh -i >& run.out &
 
 To run neon-specific tests, please use login nodes:
-env CLM_INPUT_TESTS=`pwd`/tests_pretag_cheyenne_nompi_neon ./test_driver.sh -i >& run.out &
+env CLM_INPUT_TESTS=`pwd`/tests_pretag_nompi_neon ./test_driver.sh -i >& run.out &
 
 Intended for use on NCAR machines cheyenne, geyser (DAV) and hobart.
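A side note on the subset_data_KONA change a few patches above, where "--dompft 17,19,23,45 --pctpft 28,12,32,28" became "--dompft 17 19 23 45 --pctpft 28 12 32 28": space-separated values are what argparse-style multi-value options expect. A minimal sketch, assuming these flags are declared with nargs; the real definitions live in python/ctsm/subset_data.py and may differ in types, defaults, and help text:

import argparse

# Hypothetical parser fragment; only the option names are taken from the
# test files above.
parser = argparse.ArgumentParser()
parser.add_argument("--dompft", type=int, nargs="*", default=None)
parser.add_argument("--pctpft", type=float, nargs="*", default=None)

args = parser.parse_args("--dompft 17 19 23 45 --pctpft 28 12 32 28".split())
print(args.dompft)  # [17, 19, 23, 45]
print(args.pctpft)  # [28.0, 12.0, 32.0, 28.0]

With nargs, each whitespace-separated token is converted individually, so a single comma-joined token like "17,19,23,45" would fail the int() conversion, which is consistent with the switch made in the test file.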
diff --git a/test/tools/tests_pretag_cheyenne_nompi_neon b/test/tools/tests_pretag_nompi_neon
similarity index 100%
rename from test/tools/tests_pretag_cheyenne_nompi_neon
rename to test/tools/tests_pretag_nompi_neon

From 6911b5e5ed6b0e26a64eeb2fa2906c1593d13624 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Thu, 17 Feb 2022 17:27:49 -0700
Subject: [PATCH 204/223] merge updates needed

---
 python/ctsm/git_utils.py | 1 -
 python/ctsm/toolchain/ctsm_case.py | 4 ++--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/python/ctsm/git_utils.py b/python/ctsm/git_utils.py
index 70b0f7a511..65648a2fbf 100644
--- a/python/ctsm/git_utils.py
+++ b/python/ctsm/git_utils.py
@@ -65,4 +65,3 @@ def get_ctsm_git_describe():
         .decode()
     )
     return label
-

diff --git a/python/ctsm/toolchain/ctsm_case.py b/python/ctsm/toolchain/ctsm_case.py
index 86a6861efb..fe077ef3aa 100755
--- a/python/ctsm/toolchain/ctsm_case.py
+++ b/python/ctsm/toolchain/ctsm_case.py
@@ -13,7 +13,7 @@
 from datetime import datetime
 
-from ctsm.git_utils import tag_describe
+from ctsm.git_utils import get_ctsm_git_describe
 
 # -- import local classes for this script
 logger = logging.getLogger(__name__)
@@ -267,7 +267,7 @@ def create_namelist_file(self):
         self.build_namelist_filename()
 
         with open(self.namelist_fname, "w", encoding='utf-8') as namelist_file:
-            label = tag_describe()
+            label = get_ctsm_git_describe()
 
             dst_mesh = which_mesh(self.res)
 

From c179615ca9d4ceee8f01eafa8b5d50dd5af38fd3 Mon Sep 17 00:00:00 2001
From: Bowen Fang
Date: Fri, 18 Feb 2022 22:19:17 +0800
Subject: [PATCH 205/223] Read in PCT_URBAN_MAX and initialize urban accordingly. Also updated variable name from HASURBAN to PCT_URBAN_MAX (https://github.com/ESCOMP/CTSM/issues/1572#issuecomment-998214741).

---
 src/main/surfrdMod.F90 | 60 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 60 insertions(+)

diff --git a/src/main/surfrdMod.F90 b/src/main/surfrdMod.F90
index 4e121a2c54..f4ecaa3912 100644
--- a/src/main/surfrdMod.F90
+++ b/src/main/surfrdMod.F90
@@ -79,6 +79,7 @@ subroutine surfrd_get_data (begg, endg, ldomain, lfsurdat, actual_numcft)
     use landunit_varcon , only : max_lunit, istsoil, isturb_MIN, isturb_MAX
     use dynSubgridControlMod, only : get_flanduse_timeseries
     use dynSubgridControlMod, only : get_do_transient_lakes
+    use dynSubgridControlMod, only : get_do_transient_urban
     !
     ! !ARGUMENTS:
@@ -237,6 +238,11 @@ subroutine surfrd_get_data (begg, endg, ldomain, lfsurdat, actual_numcft)
        call surfrd_lakemask(begg, endg)
     end if
 
+    ! read the urban mask (necessary for initialization of dynamic urban)
+    if (get_do_transient_urban()) then
+       call surfrd_urbanmask(begg, endg)
+    end if
+
 end subroutine surfrd_get_data
 
 !-----------------------------------------------------------------------
@@ -792,5 +798,59 @@ subroutine surfrd_lakemask(begg, endg)
 
 end subroutine surfrd_lakemask
 
+  !-----------------------------------------------------------------------
+  subroutine surfrd_urbanmask(begg, endg)
+    !
+    ! !DESCRIPTION:
+    ! Reads the urban mask, indicating where urban areas are and will grow,
+    ! from the landuse.timeseries file.
+    ! Necessary for the initialization of the urban land units.
+    ! All urban density types will initialize if any type exists or will grow.
+    !
+    ! !USES:
+    use clm_instur , only : hasurban
+    use dynSubgridControlMod , only : get_flanduse_timeseries
+    use clm_varctl , only : fname_len
+    use fileutils , only : getfil
+    !
+    ! !ARGUMENTS:
+    integer, intent(in) :: begg, endg
+    !
+    !
+    ! !LOCAL VARIABLES:
+    type(file_desc_t) :: ncid_dynuse ! netcdf id for landuse timeseries file
+    character(len=256) :: locfn ! local file name
+    character(len=fname_len) :: fdynuse ! landuse.timeseries filename
+    logical :: readvar
+    !
+    character(len=*), parameter :: subname = 'surfrd_urbanmask'
+    !
+    !-----------------------------------------------------------------------
+
+    ! get filename of landuse_timeseries file
+    fdynuse = get_flanduse_timeseries()
+
+    if (masterproc) then
+       write(iulog,*) 'Attempting to read landuse.timeseries data .....'
+       if (fdynuse == ' ') then
+          write(iulog,*)'fdynuse must be specified'
+          call endrun(msg=errMsg(sourcefile, __LINE__))
+       end if
+    end if
+
+    call getfil(fdynuse, locfn, 0 )
+
+    ! open landuse_timeseries file
+    call ncd_pio_openfile (ncid_dynuse, trim(locfn), 0)
+
+    ! read the urban mask
+    call ncd_io(ncid=ncid_dynuse, varname='PCT_URBAN_MAX', flag='read', data=hasurban, &
+         dim1name=grlnd, readvar=readvar)
+    if (.not. readvar) call endrun( msg=' ERROR: PCT_URBAN_MAX is not on landuse.timeseries file'//errMsg(sourcefile, __LINE__))
+
+    ! close landuse_timeseries file again
+    call ncd_pio_closefile(ncid_dynuse)
+  end subroutine surfrd_urbanmask
+
 end module surfrdMod

From 4e64c84c616ee8185f8887b89d8c5659eb6d2625 Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Fri, 18 Feb 2022 13:26:07 -0700
Subject: [PATCH 206/223] updating docs for dev076

---
 doc/ChangeLog | 98 +++++++++++++++++++++++++++++++++++++++++++++++++++
 doc/ChangeSum | 1 +
 2 files changed, 99 insertions(+)

diff --git a/doc/ChangeLog b/doc/ChangeLog
index 9f5b185ff6..11e2b58157 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,4 +1,102 @@
 ===============================================================
+Tag name: ctsm5.1.dev076
+Originator(s): negins (Negin Sobhani,UCAR/TSS,303-497-1224)
+Date: Fri Feb 18 13:25:19 MST 2022
+One-line Summary: updating subset_data.py script and move to the Python package.
+
+Purpose and description of changes
+----------------------------------
+The purpose of this PR was to update and move subset_data.py and other
+relevant Python scripts into the CTSM python package, so that other Python
+code (such as fsurdat_modifier) can re-use/import the capabilities of this script.
+
+This updated code now adds the capability to create user-mods for a generic
+single-point case.
+
+The subset_data.py script (top-level script: subset_data) extracts the surface
+dataset, domain file, landuse, and DATM files at a single point or a region from
+the available global dataset. Next it modifies the surface dataset based on the
+user options (command-line arguments), for example dom-pft or zeroing out
+non-veg types.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed or introduced
+------------------------
+[Remove any lines that don't apply. Remove entire section if nothing applies.]
+ +Issues fixed (include CTSM Issue #): + Fixes CTSM/#935 -- Make surface dataset and landuse consistent for singlept + Fixes CTSM/#1436 -- Running subset_data from any directory + Fixes CTSM/#1437 -- issue with modify_singlept_site_neon for finding latest file + Fixes CTSM/#1594 -- try/except for python download neon data + Fixes CTSM/#1606 -- NEON tools not handling crop weights correctly. + Part of development needed for #1490 -- generic single point simulation. + Partially Fixes CTSM/#1622 -- check if file exists and abort if not found + --overwrite + Fixes it for subset_data + Partially Fixes CTSM/#1441 -- Move critical toolchain script out of tools/contrib + Fixes it for subset_data + +CIME Issues fixed (include issue #): N/A + +Known bugs introduced in this tag (include issue #): N/A + +Known bugs found since the previous tag (include issue #): N/A + + +Notes of particular relevance for developers: +--------------------------------------------- +Changes to tests or testing: + Added python/ctsm/test/test_unit_args_utils.py (11 unit tests) + Added python/ctsm/test/test_unit_singlept_data.py (10 unit tests) + Added python/ctsm/test/test_unit_singlept_data_surfdata.py (24 unit tests) + Modified tests in python/ctsm/test/test_unit_utils.py + Modified tests in python/ctsm/test/test_unit_modify_fsurdat.py + Added 10 new tests in tests/tools/input_tests_master thanks to @ekluzek + Added test/tools/tests_pretag_nompi_neon + +Testing summary: +---------------- + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + clm_pymods test suite on cheyenne - PASS + pylint -- PASS (10.00/10) (cheyenne + python/3.7.9) + Python unit tests -- PASS (cheyenne) + Python system tests -- PASS (cheyenne) + + tools-tests (test/tools) (if tools have been changed): + + cheyenne - OK (tests_pretag_nompi_neon + subset_data tests) + +Answer changes +-------------- +Changes answers relative to baseline: NO + + +Other details +------------- +Pull Requests that document the changes (include PR ids): +https://github.com/ESCOMP/CTSM/pull/1461 + +=============================================================== +=============================================================== Tag name: ctsm5.1.dev075 Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326) Date: Wed Feb 16 21:35:40 MST 2022 diff --git a/doc/ChangeSum b/doc/ChangeSum index 26b3c9f754..41fe96d492 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.1.dev076 negins 02/18/2022 updating subset_data.py script and move to the Python package. ctsm5.1.dev075 erik 02/16/2022 Small answer changes: urban ventilation, fire-emission, irrigate off when not crop, fix two SSP ndep files ctsm5.1.dev074 slevis 02/02/2022 Introduce vert. resolved MIMICS as new method to solve below ground decomp. 
ctsm5.1.dev073 sacks 01/25/2022 Some fixes for Gregorian calendar
 ctsm5.1.dev072 negins 01/17/2022 mksurfdat toolchain part 1: gen_mksurf_namelist

From a4e712fa36845d6c5d344f82663bd6ffe044187c Mon Sep 17 00:00:00 2001
From: Negin Sobhani
Date: Fri, 18 Feb 2022 13:29:06 -0700
Subject: [PATCH 207/223] adding subset_data tests in here

---
 test/tools/tests_pretag_nompi_neon | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/test/tools/tests_pretag_nompi_neon b/test/tools/tests_pretag_nompi_neon
index 1f72cadb53..43167e71c0 100644
--- a/test/tools/tests_pretag_nompi_neon
+++ b/test/tools/tests_pretag_nompi_neon
@@ -1,3 +1,7 @@
 sm0a1 bl0a1
 smaa2 blaa2
-
+smba1 blba1
+smbb1 blbb1
+smb81 blb81
+smbc1 blbc1
+smbd1 blbd1

From 5a76634d940458fee4ff69a38471ee52216a47b2 Mon Sep 17 00:00:00 2001
From: Ryan Knox
Date: Tue, 22 Feb 2022 10:40:12 -0700
Subject: [PATCH 208/223] Update fates external from test branch to newest tag

---
 Externals_CLM.cfg | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Externals_CLM.cfg b/Externals_CLM.cfg
index fffa6fba70..e9c459f094 100644
--- a/Externals_CLM.cfg
+++ b/Externals_CLM.cfg
@@ -1,8 +1,8 @@
 [fates]
 local_path = src/fates
 protocol = git
-repo_url = https://github.com/rgknox/fates
-branch = sci.1.53.0_api.21.0.0-merge-picard
+repo_url = https://github.com/NGEET/fates
+tag = sci.1.54.0_api.22.0.0
 required = True
 
 [externals_description]

From b5a550e383f3727dfac48836383751bc1fdedc7b Mon Sep 17 00:00:00 2001
From: Ryan Knox
Date: Tue, 22 Feb 2022 13:14:04 -0500
Subject: [PATCH 209/223] Updated changelog

---
 doc/ChangeLog | 80 +++++++++++++++++++++++++++++++++++++++++++++++++++
 doc/ChangeSum | 1 +
 2 files changed, 81 insertions(+)

diff --git a/doc/ChangeLog b/doc/ChangeLog
index 11e2b58157..82484f4c77 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,4 +1,84 @@
 ===============================================================
+Tag name: ctsm5.1.dev077
+Originator(s): rgknox (Ryan Knox,,,)
+Date: Tue Feb 22 12:51:04 EST 2022
+One-line Summary: Updates to FATES API, including removal of patch dimensions from fates history and using soil instead of ground layers for fates history.
+
+Purpose and description of changes
+----------------------------------
+
+This set of changes cleaned up some aspects of the FATES history diagnostics API. Patches are not used in output diagnostics, so they were removed from the history coupling. Also, FATES only "sees" the soil, not the rock layers, so it is more appropriate to only align FATES below-ground history output on the soil layers, and not ground layers. In addition, the corresponding FATES-side changes renamed some history output variables. This is an API change as well, and is compatible with FATES: https://github.com/NGEET/fates/releases/tag/sci.1.54.0_api.22.0.0
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+[X] clm5_1-FATES-hydro
+
+
+Notes of particular relevance for users
+---------------------------------------
+
+ FATES users should now benefit from more interpretable history output from soil-dimensioned variables, where they won't have to filter out ground layers which previously contained no-data flags.
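 For post-processing scripts, the practical effect of the soil-versus-ground dimension change can be sketched as follows. This is illustrative only: the variable name is a placeholder, and the dimension names levgrnd/levsoi are assumed from common CTSM history conventions rather than taken from this patch.

 import xarray as xr

 ds = xr.open_dataset("ctsm_fates_history.nc")  # placeholder file name

 # Before this tag, below-ground FATES fields came on the full ground
 # dimension, so rock layers had to be trimmed off by hand, e.g.:
 #   soil_vals = ds["SOME_FATES_SL_VARIABLE"].isel(levgrnd=slice(0, n_soil_layers))
 # After this tag the same fields are written on the soil dimension directly:
 soil_vals = ds["SOME_FATES_SL_VARIABLE"]  # dimensioned (time, levsoi, ...)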
Also, note the following FATES history variable name changes: + + FATES_ROOTH2O_ABS_SZPF -> FATES_ABSROOT_H2O_SZPF + FATES_ROOTH2O_TRANS_SZPF -> FATES_TRANSROOT_H2O_SZPF + FATES_STEMH2O_SZPF -> FATES_STEM_H2O_SZPF + FATES_LEAFH2O_SZPF -> FATES_LEAF_H2O_SZPF + FATES_ROOTH2O_POT_SZPF -> FATES_ABSROOT_H2OPOT_SZPF + FATES_ROOTH2O_TRANSPOT_SZPF -> FATES_TRANSROOT_H2OPOT_SZPF + FATES_STEMH2O_POT_SZPF -> FATES_STEM_H2OPOT_SZPF + FATES_LEAFH2O_POT_SZPF -> FATES_LEAF_H2OPOT_SZPF + FATES_ROOT_ABSFRAC_SZPF -> FATES_ABSROOT_CONDFRAC_SZPF + FATES_ROOT_TRANSFRAC_SZPF -> FATES_TRANSROOT_CONDFRAC_SZPF + FATES_STEMH2O_FRAC_SZPF -> FATES_STEM_CONDFRAC_SZPF + FATES_LEAFH2O_FRAC_SZPF -> FATES_LEAF_CONDFRAC_SZPF + +Caveats for users (e.g., need to interpolate initial conditions): FATES users may have scripts that assume below ground diagnostics are on the ground dimension, and will have to update scripts to use the soil dimension. + +Changes made to namelist defaults (e.g., changed parameter values): No changes to defaults, but some FATES plant hydraulics history variable names were changed in the corresponding FATES tag. + +Changes to tests or testing: An update was made to the fates-hydro tests to use updated history variable names. + + +Testing summary: +---------------- + + d) regular (regular tests on normal machines if CTSM source is modified) + + cheyenne ---- /glade/scratch/rgknox/tests_0220-124230ch (OK) expected FATES baseline DIFFS + izumi ------- /scratch/cluster/rgknox/tests_0220-125238iz (OK) + + fates tests: fates test suite was run, (OK) + + +Answer changes +-------------- + +Changes answers relative to baseline: All results are B4B with base (dev076), except for two conditions: 1) FATES output that uses the soil dimension has NLCOMP and DIFFS due to the dimension change, and 2) FATES hydro simulations have different results due to bug fixes + + +Other details +------------- + +List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): fates + +Pull Requests that document the changes (include PR ids): +https://github.com/ESCOMP/CTSM/pull/1592 +https://github.com/NGEET/fates/pull/766 + +=============================================================== +=============================================================== Tag name: ctsm5.1.dev076 Originator(s): negins (Negin Sobhani,UCAR/TSS,303-497-1224) Date: Fri Feb 18 13:25:19 MST 2022 diff --git a/doc/ChangeSum b/doc/ChangeSum index 41fe96d492..63dcb94b1e 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.1.dev077 rgknox 02/22/2022 Updates to FATES API, including removal of patch dimensions from fates history and using soil instead of ground layers for fates history ctsm5.1.dev076 negins 02/18/2022 updating subset_data.py script and move to the Python package. ctsm5.1.dev075 erik 02/16/2022 Small answer changes: urban ventilation, fire-emission, irrigate off when not crop, fix two SSP ndep files ctsm5.1.dev074 slevis 02/02/2022 Introduce vert. resolved MIMICS as new method to solve below ground decomp. 
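For analysis scripts that still reference the old hydro variable names, one low-risk option is a rename shim. The mapping below is copied verbatim from the ctsm5.1.dev077 ChangeLog entry above; the use of xarray is an assumption about the user's tooling, not something this patch prescribes.

import xarray as xr

OLD_TO_NEW = {
    "FATES_ROOTH2O_ABS_SZPF": "FATES_ABSROOT_H2O_SZPF",
    "FATES_ROOTH2O_TRANS_SZPF": "FATES_TRANSROOT_H2O_SZPF",
    "FATES_STEMH2O_SZPF": "FATES_STEM_H2O_SZPF",
    "FATES_LEAFH2O_SZPF": "FATES_LEAF_H2O_SZPF",
    "FATES_ROOTH2O_POT_SZPF": "FATES_ABSROOT_H2OPOT_SZPF",
    "FATES_ROOTH2O_TRANSPOT_SZPF": "FATES_TRANSROOT_H2OPOT_SZPF",
    "FATES_STEMH2O_POT_SZPF": "FATES_STEM_H2OPOT_SZPF",
    "FATES_LEAFH2O_POT_SZPF": "FATES_LEAF_H2OPOT_SZPF",
    "FATES_ROOT_ABSFRAC_SZPF": "FATES_ABSROOT_CONDFRAC_SZPF",
    "FATES_ROOT_TRANSFRAC_SZPF": "FATES_TRANSROOT_CONDFRAC_SZPF",
    "FATES_STEMH2O_FRAC_SZPF": "FATES_STEM_CONDFRAC_SZPF",
    "FATES_LEAFH2O_FRAC_SZPF": "FATES_LEAF_CONDFRAC_SZPF",
}

def normalize_fates_hydro_names(ds: xr.Dataset) -> xr.Dataset:
    """Rename pre-dev077 FATES hydro variables to their new names, if present."""
    return ds.rename({old: new for old, new in OLD_TO_NEW.items() if old in ds})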
From 546d500c001fd130fe338d523913fb3daa41356f Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Wed, 23 Feb 2022 15:34:46 -0700 Subject: [PATCH 210/223] run_sys_tests: allow specification of a default queue for create_test For cheyenne, use a default queue of 'regular' so that even single-point tests use the regular queue instead of the recently-flaky share queue. --- python/ctsm/machine.py | 16 +++++++++-- python/ctsm/machine_defaults.py | 12 ++++++++ python/ctsm/run_sys_tests.py | 32 +++++++++++++++++---- python/ctsm/test/test_unit_machine.py | 14 ++++++--- python/ctsm/test/test_unit_run_sys_tests.py | 3 ++ 5 files changed, 64 insertions(+), 13 deletions(-) diff --git a/python/ctsm/machine.py b/python/ctsm/machine.py index 36e5c61788..607e0b43af 100644 --- a/python/ctsm/machine.py +++ b/python/ctsm/machine.py @@ -7,6 +7,11 @@ from ctsm.joblauncher.job_launcher_factory import \ create_job_launcher, JOB_LAUNCHER_NOBATCH +# Value of create_test_queue for which we don't actually add a '--queue' option to +# create_test, but instead leave that value unspecified, allowing CIME to pick an +# appropriate queue for each test using its standard mechanisms. +CREATE_TEST_QUEUE_UNSPECIFIED = "unspecified" + logger = logging.getLogger(__name__) # TODO(wjs, 2018-08-31) Turn this into a real class, with getter methods. @@ -28,6 +33,7 @@ 'baseline_dir', # str 'account', # str or None 'create_test_retry', # int + 'create_test_queue', # str 'job_launcher']) # subclass of JobLauncherBase def create_machine(machine_name, defaults, job_launcher_type=None, @@ -80,6 +86,7 @@ def create_machine(machine_name, defaults, job_launcher_type=None, mach_defaults = defaults.get(machine_name) baseline_dir = None create_test_retry = 0 + create_test_queue = CREATE_TEST_QUEUE_UNSPECIFIED if mach_defaults is not None: if job_launcher_type is None: job_launcher_type = mach_defaults.job_launcher_type @@ -95,10 +102,12 @@ def create_machine(machine_name, defaults, job_launcher_type=None, # generation and comparison, or making a link in some temporary location that # points to the standard baselines). baseline_dir = mach_defaults.baseline_dir - # We also don't provide a way to override the default create_test_retry in the - # machine object: this will always give the default value for this machine, and - # other mechanisms will be given for overriding this in a particular case. + # We also don't provide a way to override the default create_test_retry or + # create_test_queue in the machine object: these will always give the default + # value for this machine, and other mechanisms will be given for overriding these + # in a particular case. 
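            # Illustrative consequence (a sketch, not part of the original
            # patch): with the cheyenne entry in machine_defaults.py later in
            # this patch, create_machine('cheyenne', MACHINE_DEFAULTS) always
            # reports create_test_retry=0 and create_test_queue='regular';
            # per-run overrides come from run_sys_tests' --retry and --queue
            # options instead.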
create_test_retry = mach_defaults.create_test_retry + create_test_queue = mach_defaults.create_test_queue if account is None and mach_defaults.account_required and not allow_missing_entries: raise RuntimeError("Could not find an account code") else: @@ -149,6 +158,7 @@ def create_machine(machine_name, defaults, job_launcher_type=None, baseline_dir=baseline_dir, account=account, create_test_retry=create_test_retry, + create_test_queue=create_test_queue, job_launcher=job_launcher) def get_possibly_overridden_mach_value(machine, varname, value=None): diff --git a/python/ctsm/machine_defaults.py b/python/ctsm/machine_defaults.py index c6b624b885..6b387741d5 100644 --- a/python/ctsm/machine_defaults.py +++ b/python/ctsm/machine_defaults.py @@ -6,6 +6,7 @@ import os from ctsm.joblauncher.job_launcher_factory import \ JOB_LAUNCHER_QSUB +from ctsm.machine import CREATE_TEST_QUEUE_UNSPECIFIED from ctsm.machine_utils import get_user MachineDefaults = namedtuple('MachineDefaults', ['job_launcher_type', @@ -13,6 +14,7 @@ 'baseline_dir', 'account_required', 'create_test_retry', + 'create_test_queue', 'job_launcher_defaults']) # job_launcher_type: one of the JOB_LAUNCHERs defined in job_launcher_factory # scratch_dir: str @@ -23,6 +25,10 @@ # for the non-default job launcher for this machine, in case the user chooses a # non-default launcher.) # create_test_retry: int: Default number of times to retry a create_test job on this machine +# create_test_queue: str: Default queue to use for create_test; if this is +# CREATE_TEST_QUEUE_UNSPECIFIED, then we won't add a '--queue' option to create_test, +# instead leaving that value unspecified, allowing CIME to pick an appropriate queue +# for each test using its standard mechanisms. # account_required: bool: whether an account number is required on this machine (not # really a default, but used for error-checking) @@ -43,6 +49,10 @@ baseline_dir=os.path.join(os.path.sep, 'glade', 'p', 'cgd', 'tss', 'ctsm_baselines'), account_required=True, create_test_retry=0, + # NOTE(wjs, 2022-02-23) By default, use the regular queue, even for + # single-processor jobs. This is because the share queue has been really flaky, + # with lots of job failures or slow-running jobs. 
+ create_test_queue='regular', job_launcher_defaults={ JOB_LAUNCHER_QSUB: QsubDefaults( queue='regular', @@ -60,6 +70,7 @@ baseline_dir=os.path.join(os.path.sep, 'fs', 'cgd', 'csm', 'ccsm_baselines'), account_required=False, create_test_retry=0, + create_test_queue=CREATE_TEST_QUEUE_UNSPECIFIED, job_launcher_defaults={ JOB_LAUNCHER_QSUB: QsubDefaults( queue='medium', @@ -75,6 +86,7 @@ # jobs on izumi experience a high frequency of failures, often at the very end of # the job; so we'll automatically retry a failed job twice before giving up on it create_test_retry=2, + create_test_queue=CREATE_TEST_QUEUE_UNSPECIFIED, job_launcher_defaults={ JOB_LAUNCHER_QSUB: QsubDefaults( queue='medium', diff --git a/python/ctsm/run_sys_tests.py b/python/ctsm/run_sys_tests.py index f45aa81927..a72ac59a98 100644 --- a/python/ctsm/run_sys_tests.py +++ b/python/ctsm/run_sys_tests.py @@ -1,6 +1,5 @@ """Functions implementing run_sys_tests command""" -from __future__ import print_function import argparse import logging import os @@ -13,7 +12,8 @@ from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args from ctsm.machine_utils import get_machine_name -from ctsm.machine import create_machine, get_possibly_overridden_mach_value +from ctsm.machine import (create_machine, get_possibly_overridden_mach_value, + CREATE_TEST_QUEUE_UNSPECIFIED) from ctsm.machine_defaults import MACHINE_DEFAULTS from ctsm.os_utils import make_link from ctsm.path_utils import path_to_ctsm_root @@ -119,13 +119,15 @@ def run_sys_tests(machine, cime_path, that is None, then the test suite will determine it automatically) walltime (str): walltime to use for each test (if not provided, the test suite will determine it automatically) - queue (str): queue to use for each test (if not provided, the test suite will - determine it automatically) + queue (str): queue to use for each test (if not provided, will use the default for + this machine based on the passed-in machine object; if that is unspecified, then + the test suite will determine it automatically) retry (int): retry value to pass to create_test (if not provided, will use the default for this machine) extra_create_test_args (str): any extra arguments to create_test, as a single, space-delimited string testlist: list of strings giving test names to run + """ num_provided_options = ((suite_name is not None) + (testfile is not None) + @@ -148,6 +150,20 @@ def run_sys_tests(machine, cime_path, retry_final = get_possibly_overridden_mach_value(machine, varname='create_test_retry', value=retry) + # Note the distinction between a queue of None and a queue of + # CREATE_TEST_QUEUE_UNSPECIFIED in the following: If queue is None (meaning that the + # user hasn't specified a '--queue' argument to run_sys_tests), then we'll use the + # queue specified in the machine object; if queue is CREATE_TEST_QUEUE_UNSPECIFIED, + # then we'll force queue_final to be None, which means we won't add a '--queue' + # argument to create_test, regardless of what is specified in the machine object. + # (It's also possible for the machine object to specify a queue of + # CREATE_TEST_QUEUE_UNSPECIFIED, which means that we won't use a '--queue' argument to + # create_test unless the user specifies a '--queue' argument to run_sys_tests.) 
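        # Summary of the resolution logic implemented just below (an
        # illustrative sketch; 'regular' is cheyenne's default from
        # machine_defaults.py, and 'share' is a hypothetical user choice):
        #
        #   --queue arg   machine create_test_queue        queue_final
        #   None          'regular'                        'regular'
        #   None          CREATE_TEST_QUEUE_UNSPECIFIED    None (omit --queue)
        #   'share'       anything                         'share'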
+ queue_final = get_possibly_overridden_mach_value(machine, + varname='create_test_queue', + value=queue) + if queue_final == CREATE_TEST_QUEUE_UNSPECIFIED: + queue_final = None if not skip_git_status: _record_git_status(testroot, retry_final, dry_run) @@ -159,7 +175,7 @@ def run_sys_tests(machine, cime_path, baseline_root=baseline_root_final, account=machine.account, walltime=walltime, - queue=queue, + queue=queue_final, retry=retry_final, rerun_existing_failures=rerun_existing_failures, extra_create_test_args=extra_create_test_args) @@ -309,7 +325,11 @@ def _commandline_args(): parser.add_argument('--queue', help='Queue to which tests are submitted.\n' - 'If not provided, uses machine default.') + 'The special value "{}" means do not add a --queue option to create_test,\n' + 'instead allowing CIME to pick an appropriate queue for each test\n' + 'using its standard mechanisms.\n' + 'Default for this machine: {}'.format( + CREATE_TEST_QUEUE_UNSPECIFIED, default_machine.create_test_queue)) parser.add_argument('--retry', type=int, help='Argument to create_test: Number of times to retry failed tests.\n' diff --git a/python/ctsm/test/test_unit_machine.py b/python/ctsm/test/test_unit_machine.py index 6a2f7ac172..bc1a6f777d 100755 --- a/python/ctsm/test/test_unit_machine.py +++ b/python/ctsm/test/test_unit_machine.py @@ -9,7 +9,8 @@ from ctsm import add_cime_to_path # pylint: disable=unused-import from ctsm import unit_testing -from ctsm.machine import create_machine, get_possibly_overridden_mach_value +from ctsm.machine import (create_machine, get_possibly_overridden_mach_value, + CREATE_TEST_QUEUE_UNSPECIFIED) from ctsm.machine_utils import get_user from ctsm.machine_defaults import MACHINE_DEFAULTS, MachineDefaults, QsubDefaults from ctsm.joblauncher.job_launcher_no_batch import JobLauncherNoBatch @@ -24,7 +25,8 @@ class TestCreateMachine(unittest.TestCase): """Tests of create_machine""" def assertMachineInfo(self, machine, name, scratch_dir, baseline_dir, account, - create_test_retry=0): + create_test_retry=0, + create_test_queue=CREATE_TEST_QUEUE_UNSPECIFIED): """Asserts that the basic machine info is as expected. 
This does NOT dive down into the job launcher""" @@ -33,6 +35,7 @@ def assertMachineInfo(self, machine, name, scratch_dir, baseline_dir, account, self.assertEqual(machine.baseline_dir, baseline_dir) self.assertEqual(machine.account, account) self.assertEqual(machine.create_test_retry, create_test_retry) + self.assertEqual(machine.create_test_queue, create_test_queue) def assertNoBatchInfo(self, machine, nice_level=None): """Asserts that the machine's launcher is of type JobLauncherNoBatch""" @@ -65,6 +68,7 @@ def create_defaults(default_job_launcher=JOB_LAUNCHER_QSUB): baseline_dir=os.path.join(os.path.sep, 'my', 'baselines'), account_required=True, create_test_retry=2, + create_test_queue="regular", job_launcher_defaults={ JOB_LAUNCHER_QSUB: QsubDefaults( queue='regular', @@ -134,7 +138,8 @@ def test_knownMachine_defaults(self): get_user()), baseline_dir=os.path.join(os.path.sep, 'my', 'baselines'), account='a123', - create_test_retry=2) + create_test_retry=2, + create_test_queue="regular") self.assertQsubInfo(machine=machine, queue='regular', walltime='06:00:00', @@ -157,7 +162,8 @@ def test_knownMachine_argsExplicit(self): scratch_dir='/custom/path/to/scratch', baseline_dir=os.path.join(os.path.sep, 'my', 'baselines'), account='a123', - create_test_retry=2) + create_test_retry=2, + create_test_queue="regular") self.assertQsubInfo(machine=machine, queue='custom_queue', walltime='9:87:65', diff --git a/python/ctsm/test/test_unit_run_sys_tests.py b/python/ctsm/test/test_unit_run_sys_tests.py index 8a53081a5b..218001c7f7 100755 --- a/python/ctsm/test/test_unit_run_sys_tests.py +++ b/python/ctsm/test/test_unit_run_sys_tests.py @@ -124,6 +124,9 @@ def test_createTestCommand_testnames(self): assertNotRegex(self, command, r'--compare\s') assertNotRegex(self, command, r'--generate\s') assertNotRegex(self, command, r'--baseline-root\s') + # In the machine object for this test, create_test_queue will be 'unspecified'; + # verify that this results in there being no '--queue' argument: + assertNotRegex(self, command, r'--queue\s') expected_cs_status = os.path.join(self._scratch, self._expected_testroot(), From 154770797d01ebeb93dfa23f60d311ddc7e1fec3 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Wed, 23 Feb 2022 21:27:35 -0700 Subject: [PATCH 211/223] Rework single point testing Main motivation is to avoid the share-queue-related issues we keep hitting on cheyenne, and also move to slightly better balance between our izumi and cheyenne testing, reducing our overall test turnaround time. I am also combining or cutting single point tests that feel particularly redundant or unnecessary. 
Partially addresses https://github.com/ESCOMP/CTSM/issues/275 --- cime_config/config_compsets.xml | 4 + cime_config/testdefs/testlist_clm.xml | 155 +++++------------- .../cropMonthlyNoinitial/include_user_mods | 1 + .../clm/cropMonthlyNoinitial/shell_commands | 1 + .../clm/ptsRLB/include_user_mods | 1 - .../testmods_dirs/clm/ptsRLB/shell_commands | 2 - 6 files changed, 47 insertions(+), 117 deletions(-) create mode 100644 cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/include_user_mods create mode 100755 cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/shell_commands delete mode 100644 cime_config/testdefs/testmods_dirs/clm/ptsRLB/include_user_mods delete mode 100644 cime_config/testdefs/testmods_dirs/clm/ptsRLB/shell_commands diff --git a/cime_config/config_compsets.xml b/cime_config/config_compsets.xml index aec030b47e..187f359c11 100644 --- a/cime_config/config_compsets.xml +++ b/cime_config/config_compsets.xml @@ -229,6 +229,10 @@ I2000Clm50FatesRs 2000_DATM%GSWP3v1_CLM50%FATES_SICE_SOCN_SROF_SGLC_SWAV + + I2000Clm51FatesRs + 2000_DATM%GSWP3v1_CLM51%FATES_SICE_SOCN_SROF_SGLC_SWAV + I1850Clm50Bgc diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index dd40b43f2a..9c32bd09d1 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -542,7 +542,7 @@ - + @@ -1124,14 +1124,6 @@ - - - - - - - - @@ -1143,7 +1135,7 @@ - + @@ -1158,26 +1150,9 @@ - + - - - - - - - - - - - - - - - - - - + @@ -1186,39 +1161,39 @@ - + - + - + - + - + - + - + - + - + - + @@ -1232,7 +1207,7 @@ - + @@ -1277,7 +1252,7 @@ - + @@ -1287,7 +1262,7 @@ - + @@ -1496,27 +1471,10 @@ - - - - - - - - - - - - - - - - - + - - + @@ -1525,11 +1483,11 @@ - + + - @@ -1537,8 +1495,8 @@ - - + + @@ -1637,14 +1595,15 @@ - + - - + + + @@ -1689,7 +1648,7 @@ - + @@ -1750,34 +1709,9 @@ - + - - - - - - - - - - - - - - - - - - - - - - - - - - + @@ -1893,8 +1827,7 @@ - - + @@ -1902,7 +1835,7 @@ - + @@ -1945,9 +1878,7 @@ - - @@ -1984,7 +1915,6 @@ - @@ -1995,6 +1925,7 @@ + @@ -2018,16 +1949,7 @@ - - - - - - - - - - + @@ -2304,7 +2226,7 @@ - + @@ -2493,12 +2415,17 @@ - + + + + + + diff --git a/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/include_user_mods new file mode 100644 index 0000000000..02ec13743f --- /dev/null +++ b/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/include_user_mods @@ -0,0 +1 @@ +../cropMonthOutput diff --git a/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/shell_commands b/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/shell_commands new file mode 100755 index 0000000000..2a9f09bd75 --- /dev/null +++ b/cime_config/testdefs/testmods_dirs/clm/cropMonthlyNoinitial/shell_commands @@ -0,0 +1 @@ +./xmlchange CLM_FORCE_COLDSTART="on" diff --git a/cime_config/testdefs/testmods_dirs/clm/ptsRLB/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/ptsRLB/include_user_mods deleted file mode 100644 index cdb9d9f000..0000000000 --- a/cime_config/testdefs/testmods_dirs/clm/ptsRLB/include_user_mods +++ /dev/null @@ -1 +0,0 @@ -../pts diff --git a/cime_config/testdefs/testmods_dirs/clm/ptsRLB/shell_commands b/cime_config/testdefs/testmods_dirs/clm/ptsRLB/shell_commands deleted file mode 100644 index 15fd1cced4..0000000000 --- a/cime_config/testdefs/testmods_dirs/clm/ptsRLB/shell_commands +++ /dev/null @@ -1,2 +0,0 @@ -./xmlchange PTS_LAT=-5,PTS_LON=290 -./xmlchange --force 
CLM_FORCE_COLDSTART=on From 05b54929824971b39ffaea1753fd523b37167d0b Mon Sep 17 00:00:00 2001 From: Bowen Fang Date: Thu, 24 Feb 2022 23:21:12 +0800 Subject: [PATCH 212/223] bug fix and name change 1. Fixed urban density type index for conditional initialization. 2. Change internal variable name from hasurban to pct_urban_max for consistency. --- src/main/clm_initializeMod.F90 | 6 +++--- src/main/clm_varsur.F90 | 4 ++-- src/main/subgridMod.F90 | 14 ++++++++------ src/main/surfrdMod.F90 | 6 +++--- 4 files changed, 16 insertions(+), 14 deletions(-) diff --git a/src/main/clm_initializeMod.F90 b/src/main/clm_initializeMod.F90 index 720a3aa6a8..1415152170 100644 --- a/src/main/clm_initializeMod.F90 +++ b/src/main/clm_initializeMod.F90 @@ -16,7 +16,7 @@ module clm_initializeMod use clm_varctl , only : use_lch4, use_cn, use_cndv, use_c13, use_c14, use_fates use clm_varctl , only : use_soil_moisture_streams use clm_instur , only : wt_lunit, urban_valid, wt_nat_patch, wt_cft, fert_cft - use clm_instur , only : irrig_method, wt_glc_mec, topo_glc_mec, haslake, hasurban + use clm_instur , only : irrig_method, wt_glc_mec, topo_glc_mec, haslake, pct_urban_max use perf_mod , only : t_startf, t_stopf use readParamsMod , only : readParameters use ncdio_pio , only : file_desc_t @@ -214,7 +214,7 @@ subroutine initialize2(ni,nj) allocate (wt_glc_mec (begg:endg, maxpatch_glc )) allocate (topo_glc_mec (begg:endg, maxpatch_glc )) allocate (haslake (begg:endg )) - allocate (hasurban (begg:endg, numurbl )) + allocate (pct_urban_max(begg:endg, numurbl )) ! Read list of Patches and their corresponding parameter values ! Independent of model resolution, Needs to stay before surfrd_get_data @@ -289,7 +289,7 @@ subroutine initialize2(ni,nj) ! Deallocate surface grid dynamic memory for variables that aren't needed elsewhere. ! Some things are kept until the end of initialize2; urban_valid is kept through the ! end of the run for error checking. - deallocate (wt_lunit, wt_cft, wt_glc_mec, haslake, hasurban) + deallocate (wt_lunit, wt_cft, wt_glc_mec, haslake, pct_urban_max) ! Determine processor bounds and clumps for this processor call get_proc_bounds(bounds_proc) diff --git a/src/main/clm_varsur.F90 b/src/main/clm_varsur.F90 index e91c6a5880..f9cafc28b2 100644 --- a/src/main/clm_varsur.F90 +++ b/src/main/clm_varsur.F90 @@ -50,8 +50,8 @@ module clm_instur logical , pointer :: haslake(:) ! whether we have urban to initialize in each grid cell - ! (second dimension goes 0:numurbl-1) - logical , pointer :: hasurban(:,:) + ! (second dimension goes 1:numurbl) + double , pointer :: pct_urban_max(:,:) !----------------------------------------------------------------------- end module clm_instur diff --git a/src/main/subgridMod.F90 b/src/main/subgridMod.F90 index d0eb285d65..bddacaafb8 100644 --- a/src/main/subgridMod.F90 +++ b/src/main/subgridMod.F90 @@ -350,9 +350,9 @@ subroutine subgrid_get_info_urban(gi, ltype, npatches, ncols, nlunits) ! In either case, for simplicity, we always allocate space for all columns on any ! allocated urban landunits. - ! For dynamic urban: to improve efficiency, 'hasurban' is added in landuse.timeseries + ! For dynamic urban: to improve efficiency, 'PCT_URBAN_MAX' is added in landuse.timeseries ! that tells if any urban landunit ever grows in a given grid cell in a transient - ! run. The urban landunit is allocated only if hasurban is true. (#1572) + ! run. The urban landunit is allocated only if PCT_URBAN_MAX is above 0. 
(#1572) if (run_zero_weight_urban) then if (urban_valid(gi)) then @@ -615,11 +615,11 @@ function urban_landunit_exists(gi, ltype) result(exists) ! ! !DESCRIPTION: ! Returns true if a landunit for urban should be created in memory - ! which is defined for gridcells which will grow urban, given by hasurban + ! which is defined for gridcells which will grow urban, given by pct_urban_max ! ! !USES: use dynSubgridControlMod , only : get_do_transient_urban - use clm_instur , only : hasurban + use clm_instur , only : pct_urban_max use clm_varcon , only : isturb_MIN ! ! !ARGUMENTS: @@ -628,15 +628,17 @@ function urban_landunit_exists(gi, ltype) result(exists) integer, intent(in) :: ltype !landunit type (isturb_tbd, etc.) ! ! !LOCAL VARIABLES: + integer :: dens_index ! urban density type index character(len=*), parameter :: subname = 'urban_landunit_exists' !----------------------------------------------------------------------- if (get_do_transient_urban()) then ! To support dynamic landunits, we initialize an urban land unit in each grid cell - ! in which there are urban. This is defined by the hasurban variable. + ! in which there are urban. This is defined by the pct_urban_max variable. - if (hasurban(gi,ltype-isturb_MIN)) then + dens_index = ltype - isturb_MIN + 1 + if (pct_urban_max(gi,dens_index) > 0.0_r8) then exists = .true. else exists = .false. diff --git a/src/main/surfrdMod.F90 b/src/main/surfrdMod.F90 index f4ecaa3912..72c592ff7f 100644 --- a/src/main/surfrdMod.F90 +++ b/src/main/surfrdMod.F90 @@ -808,7 +808,7 @@ subroutine surfrd_urbanmask(begg, endg) ! All urban density types will intialize if any type exists or will grow. ! ! !USES: - use clm_instur , only : hasurban + use clm_instur , only : pct_urban_max use dynSubgridControlMod , only : get_flanduse_timeseries use clm_varctl , only : fname_len use fileutils , only : getfil @@ -843,8 +843,8 @@ subroutine surfrd_urbanmask(begg, endg) ! open landuse_timeseries file call ncd_pio_openfile (ncid_dynuse, trim(locfn), 0) - ! read the lakemask - call ncd_io(ncid=ncid_dynuse, varname='PCT_URBAN_MAX', flag='read', data=hasurban, & + ! read the urbanmask + call ncd_io(ncid=ncid_dynuse, varname='PCT_URBAN_MAX', flag='read', data=pct_urban_max, & dim1name=grlnd, readvar=readvar) if (.not. readvar) call endrun( msg=' ERROR: PCT_URBAN_MAX is not on landuse.timeseries file'//errMsg(sourcefile, __LINE__)) From e1c2facf9bafc9d23bd02d170ac3fdafbc7f6a40 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Thu, 24 Feb 2022 14:27:19 -0700 Subject: [PATCH 213/223] Update ChangeLog --- doc/ChangeLog | 88 +++++++++++++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 2 files changed, 89 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index 82484f4c77..d2f00deaae 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,92 @@ =============================================================== +Tag name: ctsm5.1.dev078 +Originator(s): sacks (Bill Sacks) +Date: Thu Feb 24 14:15:46 MST 2022 +One-line Summary: Rework single-point testing + +Purpose and description of changes +---------------------------------- + +Two changes to make the single-point tests in our test suite run more +smoothly. The main motivation for this is that single-point tests have +been failing frequently on cheyenne, presumably due to issues with nodes +in the share queue. + +(1) Rework the test list to remove redundant single-point tests and move + many of our single-point tests from cheyenne to izumi. 
(With (2) in + place, this isn't really necessary, but given that we're going to be + using full nodes for single-point tests, it seems better to do this + on izumi rather than wasting all of these processors on the + generally more heavily-loaded cheyenne. Also, this helps accomplish + the goal of providing better balance between our different + machine-compiler permutations, helping to reduce overall testing + time.) + +(2) Change run_sys_tests so that, for the remaining single-point tests + on cheyenne, we use the regular queue rather than the share queue. + This means that single-point tests use a full node, which is + wasteful but makes the tests more reliable. (You can override this + behavior with the --queue option to run_sys_tests: To revert to the + earlier behavior, where the queue is determined automatically for + each test, add '--queue unspecified' to your run_sys_tests + invocation. (As before --queue can also be used to specify some + other queue to use for all tests.)) + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed or introduced +------------------------ +Issues fixed (include CTSM Issue #): +- Partially addresses ESCOMP/CTSM#275 + + +Notes of particular relevance for developers: +--------------------------------------------- +Changes to tests or testing: +- Overhauls single-point tests. See + https://github.com/ESCOMP/CTSM/pull/1660#issuecomment-1049489430 for details. + + +Testing summary: +---------------- + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + cheyenne - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- PASS + izumi ------- PASS + +Answer changes +-------------- + +Changes answers relative to baseline: NO + +Other details +------------- +Pull Requests that document the changes (include PR ids): +https://github.com/ESCOMP/CTSM/pull/1660 + +=============================================================== +=============================================================== Tag name: ctsm5.1.dev077 Originator(s): rgknox (Ryan Knox,,,) Date: Tue Feb 22 12:51:04 EST 2022 diff --git a/doc/ChangeSum b/doc/ChangeSum index 63dcb94b1e..e40f4756d8 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.1.dev078 sacks 02/24/2022 Rework single-point testing ctsm5.1.dev077 rgknox 02/22/2022 Updates to FATES API, including removal of patch dimensions from fates history and using soil instead of ground layers for fates history ctsm5.1.dev076 negins 02/18/2022 updating subset_data.py script and move to the Python package. 
ctsm5.1.dev075 erik 02/16/2022 Small answer changes: urban ventilation, fire-emission, irrigate off when not crop, fix two SSP ndep files From 270d14776d2ba452173ad59052624c92ef7314da Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Thu, 24 Feb 2022 14:53:44 -0700 Subject: [PATCH 214/223] Update ChangeLog --- doc/ChangeLog | 94 +++++++++++++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 2 files changed, 95 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index d2f00deaae..765458b41d 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,98 @@ =============================================================== +Tag name: ctsm5.1.dev079 +Originator(s): sacks (Bill Sacks) +Date: Thu Feb 24 14:40:58 MST 2022 +One-line Summary: Changes to CropPhenology timing + +Purpose and description of changes +---------------------------------- + +Changes to CropPhenology timing to support +https://github.com/escomp/ctsm/pull/1616 and other work being done by +Sam Rabin: + +(1) Change CropPhenology to look at the time as of the START of the time + step. Previously, CropPhenology looked at time as of the END of the + time step. This was somewhat problematic, particularly because it + meant that the last time step of the year was considered Jan 1, and + so crops with a planting window beginning Jan 1 could be planted at + the end of the previous year rather than the start of the new year. + This was becoming particularly problematic in the context of Sam + Rabin's upcoming prescribed sowing date work (see #1623 and #1616 + for some discussion). + +(2) Call CropPhenology regardless of doalb (however, still do not call + CropPhenology on time step 0). (See some discussion in #1626 and + #1623.) + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed or introduced +------------------------ +[Remove any lines that don't apply. Remove entire section if nothing applies.] + +Issues fixed (include CTSM Issue #): +- Resolves ESCOMP/CTSM#1623 (Change CropPhenology to look at the day of + year as of the start of the time step, not the end of the time step) + +Testing summary: +---------------- + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- OK + izumi ------- OK + +Also ran +ERS_Ld3_D.f09_g17.I1850Clm50BgcCrop.cheyenne_intel.clm-rad_hrly_light_res_half +(a debug version of the one test in the test suite that uses hourly +doalb) + +Answer changes +-------------- + +Changes answers relative to baseline: YES + + Summarize any changes to answers, i.e., + - what code configurations: Crop cases + - what platforms/compilers: All + - nature of change (roundoff; larger than roundoff/same climate; new climate): + Not investigated carefully, but expected to be larger than + roundoff/same climate + + Based on an analysis of the code, the differences should be small, + but I haven't investigated this with simulations. 
In principle, + there could be larger answer changes for crops that invoke + vernalization in cases where doalb isn't true every time step, but + currently I think we don't simulate any crops that invoke + vernalization. It's also possible that there are other larger + changes in cases where doalb isn't true every time step (i.e., F + and B compsets), though from reading through the code, I don't see + any potentials for big changes. + + +Other details +------------- +Pull Requests that document the changes (include PR ids): +https://github.com/ESCOMP/CTSM/pull/1628 + +=============================================================== +=============================================================== Tag name: ctsm5.1.dev078 Originator(s): sacks (Bill Sacks) Date: Thu Feb 24 14:15:46 MST 2022 diff --git a/doc/ChangeSum b/doc/ChangeSum index e40f4756d8..5c0bed3cdb 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.1.dev079 sacks 02/24/2022 Changes to CropPhenology timing ctsm5.1.dev078 sacks 02/24/2022 Rework single-point testing ctsm5.1.dev077 rgknox 02/22/2022 Updates to FATES API, including removal of patch dimensions from fates history and using soil instead of ground layers for fates history ctsm5.1.dev076 negins 02/18/2022 updating subset_data.py script and move to the Python package. From a17c76df3014d3db71297e93c4c1c3021c41dce0 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Thu, 24 Feb 2022 15:39:56 -0700 Subject: [PATCH 215/223] Update ChangeLog --- doc/ChangeLog | 89 +++++++++++++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 2 files changed, 90 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index 765458b41d..9abddd0f66 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,93 @@ =============================================================== +Tag name: ctsm5.1.dev080 +Originator(s): sacks (Bill Sacks) +Date: Thu Feb 24 15:26:02 MST 2022 +One-line Summary: Use avg days per year when converting param units + +Purpose and description of changes +---------------------------------- + +When converting parameter units from per-year to per-second, use average +days per year instead of current number of days per year. This is +relevant when running with a Gregorian calendar. + +See https://github.com/ESCOMP/CTSM/issues/1612 for details. + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] 
+ +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed or introduced +------------------------ +Issues fixed (include CTSM Issue #): +- Resolves ESCOMP/CTSM#1612 (Some uses of get_days_per_year should use + the average number of days in a year, not the number of days in the + current year) + +Known bugs introduced in this tag (include issue #): +- ESCOMP/CTSM#1624 (Change get_average_days_per_year to use + ESMF_CalendarGet) + +Notes of particular relevance for developers: +--------------------------------------------- +Caveats for developers (e.g., code that is duplicated that requires double maintenance): +- Once ESMF supports it, we should change get_average_days_per_year to + use ESMF_CalendarGet (see ESCOMP/CTSM#1624) + +Testing summary: +---------------- + + cheyenne ---- OK + izumi ------- OK + +Test +ERS_Ly3_P72x2_Vmct.f10_f10_mg37.IHistClm50BgcCropG.cheyenne_intel.clm-cropMonthOutput +initially failed COMPARE_base_rest and BASELINE comparisons; rerunning +solved the issue. + +Answer changes +-------------- + +Changes answers relative to baseline: YES + + Summarize any changes to answers, i.e., + - what code configurations: Gregorian cases with BGC + - what platforms/compilers: all + - nature of change (roundoff; larger than roundoff/same climate; new climate): + larger than roundoff / same climate + + Changes a few BGC-related parameters by a small amount (< 1/365 in + a relative sense) for Gregorian cases + + Changes answers for these tests: + - SMS_Ly5_Mmpi-serial.1x1_smallvilleIA.IHistClm50BgcCropQianRs.izumi_gnu.clm-gregorian_cropMonthOutput + - DAE_C2_D_Lh12.f10_f10_mg37.I2000Clm50BgcCrop.cheyenne_intel.clm-DA_multidrv + - DAE_N2_D_Lh12_Vmct.f10_f10_mg37.I2000Clm50BgcCrop.cheyenne_intel.clm-DA_multidrv + + In principle, might change answers by roundoff for NOLEAP BGC + tests on some machines / compilers, but that wasn't seen in any + aux_clm testing. + +Other details +------------- +Pull Requests that document the changes (include PR ids): +https://github.com/ESCOMP/CTSM/pull/1625 + +=============================================================== +=============================================================== Tag name: ctsm5.1.dev079 Originator(s): sacks (Bill Sacks) Date: Thu Feb 24 14:40:58 MST 2022 diff --git a/doc/ChangeSum b/doc/ChangeSum index 5c0bed3cdb..b389c325f8 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.1.dev080 sacks 02/24/2022 Use avg days per year when converting param units ctsm5.1.dev079 sacks 02/24/2022 Changes to CropPhenology timing ctsm5.1.dev078 sacks 02/24/2022 Rework single-point testing ctsm5.1.dev077 rgknox 02/22/2022 Updates to FATES API, including removal of patch dimensions from fates history and using soil instead of ground layers for fates history From 13510ae2bb538a87c192cf5d937aeda4858b7562 Mon Sep 17 00:00:00 2001 From: Keith Oleson Date: Thu, 24 Feb 2022 16:04:24 -0700 Subject: [PATCH 216/223] Changes to get dynamic urban test working. 
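Conceptually, the landuse-file side of this change is small: PCT_URBAN_MAX is just the maximum of PCT_URBAN over the time dimension, computed separately for each urban density type, so the model can tell at initialization which density types never grow in a grid cell. As a hedged illustration, here is the same computation in Python/xarray; the variable and dimension names follow the NCL diff below, and the tiny sample array is invented:

    import numpy as np
    import xarray as xr

    # Three time steps x three density types (tbd, hd, md) on a 1x1 grid;
    # values are made up, echoing the smallville test where md stays zero.
    pct_urban = xr.DataArray(
        np.array([[[[0.0]], [[0.0]], [[0.0]]],
                  [[[20.0]], [[15.0]], [[0.0]]],
                  [[[10.0]], [[8.0]], [[0.0]]]]),
        dims=('time', 'numurbl', 'lsmlat', 'lsmlon'),
        name='PCT_URBAN')

    # PCT_URBAN_MAX: per-density maximum across the whole timeseries.
    pct_urban_max = pct_urban.max(dim='time').rename('PCT_URBAN_MAX')
    print(pct_urban_max.values.squeeze())  # -> [20. 15.  0.]

A zero in PCT_URBAN_MAX (medium density here) marks a density type that never grows, so its landunit need not be allocated; that is the memory saving the Fortran changes below rely on.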
--- .../modify_smallville_with_dynurban.ncl | 31 ++++++++++++------- .../smallville_dynurban_monthly/user_nl_clm | 9 +++--- src/main/clm_initializeMod.F90 | 5 +-- src/main/clm_varsur.F90 | 2 +- src/main/subgridMod.F90 | 12 ++----- src/main/subgridWeightsMod.F90 | 18 ++++++----- src/main/surfrdMod.F90 | 8 +++-- 7 files changed, 47 insertions(+), 38 deletions(-) diff --git a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl index a70d5e9641..15ec0469be 100644 --- a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl +++ b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl @@ -1,7 +1,9 @@ ; NCL script ; modify_smallville_with_dynurban.ncl ; Keith Oleson, Dec 2021 -; Purpose is to create a dynamic urban file for the smallville grid for test +; Feb 23, 2022: Change HASURBAN to PCT_URBAN_MAX. The output file date has been updated from +; c211206 to c220223. +; Purpose is to create a transient landuse file for the smallville grid for dynamic urban testing ; ERS_Lm25.1x1_smallvilleIA.IHistClm50BgcCropQianRs.cheyenne_gnu.clm-smallville_dynurban_monthly ;************************************** @@ -17,7 +19,7 @@ begin print ("=========================================") infile = "/glade/p/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc" - outfile = "/glade/p/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c211206.nc" + outfile = "/glade/p/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c220223.nc" system("cp " + infile + " " + outfile) @@ -37,24 +39,29 @@ begin pct_urban@units = "unitless" printVarSummary(pct_urban) - hasurban = new((/numurbl,dimsizes(pct_crop(0,:,0)),dimsizes(pct_crop(0,0,:))/),double,"No_FillValue") - hasurban!0 = pct_urban!1 - hasurban!1 = pct_urban!2 - hasurban!2 = pct_urban!3 - hasurban = 1.d - printVarSummary(hasurban) - pct_urban(:,0,0,0) = (/0.d,20.d,10.d,10.d,10.d,10.d/) pct_urban(:,1,0,0) = (/0.d,15.d, 8.d, 8.d, 8.d, 8.d/) - pct_urban(:,2,0,0) = (/0.d,10.d, 5.d, 5.d, 5.d, 5.d/) +;pct_urban(:,2,0,0) = (/0.d,10.d, 5.d, 5.d, 5.d, 5.d/) + pct_urban(:,2,0,0) = (/0.d, 0.d, 0.d, 0.d, 0.d, 0.d/) + + pct_urban_max = new((/numurbl,dimsizes(pct_crop(0,:,0)),dimsizes(pct_crop(0,0,:))/),double,"No_FillValue") + pct_urban_max!0 = pct_urban!1 + pct_urban_max!1 = pct_urban!2 + pct_urban_max!2 = pct_urban!3 + pct_urban_max(0,:,:) = max(pct_urban(:,0,0,0)) + pct_urban_max(1,:,:) = max(pct_urban(:,1,0,0)) + pct_urban_max(2,:,:) = max(pct_urban(:,2,0,0)) + printVarSummary(pct_urban_max) + pct_urban_max@units = "unitless" + pct_urban_max@long_name = "maximum percent urban for each density type (tbd, hd, md)" pct_crop(:,0,0) = (/0.,25.,12.,12.,12.,12./) - outf->HASURBAN = hasurban + outf->PCT_URBAN_MAX = pct_urban_max outf->PCT_URBAN = pct_urban outf->PCT_CROP = pct_crop - outf@history = "This file was created with the following NCL script: /glade/p/cgd/tss/people/oleson/modify_surfdata/modify_smallville_with_dynurban.ncl. The file used as a template is: /glade/p/cesm/cseg/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc. 
Key points are that urban area starts as 0, increases after the first year, then decreases after the second year. PCT_CROP is also changed so that PCT_URBAN + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_URBAN in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.). Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid." + outf@history = "This file was created with the following NCL script: /glade/p/cgd/tss/people/oleson/modify_surfdata/modify_smallville_with_dynurban.ncl. The file used as a template is: /glade/p/cesm/cseg/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc. Key points are that urban area starts as 0, increases after the first year, then decreases after the second year. Medium density urban is set to zero to test the memory-saving behavior of PCT_URBAN_MAX. PCT_CROP is also changed so that PCT_URBAN + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_URBAN in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.). Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid." print ("=========================================") print ("Finish Time: "+systemfunc("date") ) diff --git a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm index ebda6ab408..dc9e2bf333 100644 --- a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm +++ b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm @@ -1,12 +1,13 @@ do_transient_urban = .true. -!KO The following run_zero_weight_urban setting is temporary until the HASURBAN methdology is implemented. -run_zero_weight_urban = .true. -! This file was created with the following NCL script: +! The flanduse_timeseries file was created with the following NCL script (a copy of this script is in cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly): ! /glade/p/cgd/tss/people/oleson/modify_surfdata/modify_smallville_with_dynurban.ncl ! The file used as a template is: ! /glade/p/cesm/cseg/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc ! Key points are that urban area starts as 0, increases after the first year, then decreases after the second year. +! Medium density urban is set to zero to test the memory-saving behavior of PCT_URBAN_MAX. ! PCT_CROP is also changed so that PCT_URBAN + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_URBAN in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.) ! Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid. -flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c211206.nc' +! 
Feb 23, 2022: Use updated file with HASURBAN replaced by PCT_URBAN_MAX +!flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c220223.nc' +flanduse_timeseries = '/glade/p/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c220223.nc' diff --git a/src/main/clm_initializeMod.F90 b/src/main/clm_initializeMod.F90 index 1415152170..fc031657de 100644 --- a/src/main/clm_initializeMod.F90 +++ b/src/main/clm_initializeMod.F90 @@ -288,8 +288,9 @@ subroutine initialize2(ni,nj) ! Deallocate surface grid dynamic memory for variables that aren't needed elsewhere. ! Some things are kept until the end of initialize2; urban_valid is kept through the - ! end of the run for error checking. - deallocate (wt_lunit, wt_cft, wt_glc_mec, haslake, pct_urban_max) + ! end of the run for error checking, pct_urban_max is kept through the end of the run + ! for reweighting in subgridWeights. + deallocate (wt_lunit, wt_cft, wt_glc_mec, haslake) ! Determine processor bounds and clumps for this processor call get_proc_bounds(bounds_proc) diff --git a/src/main/clm_varsur.F90 b/src/main/clm_varsur.F90 index f9cafc28b2..d360941d23 100644 --- a/src/main/clm_varsur.F90 +++ b/src/main/clm_varsur.F90 @@ -51,7 +51,7 @@ module clm_instur ! whether we have urban to initialize in each grid cell ! (second dimension goes 1:numurbl) - double , pointer :: pct_urban_max(:,:) + real(r8), pointer :: pct_urban_max(:,:) !----------------------------------------------------------------------- end module clm_instur diff --git a/src/main/subgridMod.F90 b/src/main/subgridMod.F90 index bddacaafb8..645d02a603 100644 --- a/src/main/subgridMod.F90 +++ b/src/main/subgridMod.F90 @@ -352,7 +352,7 @@ subroutine subgrid_get_info_urban(gi, ltype, npatches, ncols, nlunits) ! For dynamic urban: to improve efficiency, 'PCT_URBAN_MAX' is added in landuse.timeseries ! that tells if any urban landunit ever grows in a given grid cell in a transient - ! run. The urban landunit is allocated only if PCT_URBAN_MAX is above 0. (#1572) + ! run. The urban landunit is allocated only if PCT_URBAN_MAX is greater than 0. (#1572) if (run_zero_weight_urban) then if (urban_valid(gi)) then @@ -360,12 +360,6 @@ subroutine subgrid_get_info_urban(gi, ltype, npatches, ncols, nlunits) else this_landunit_exists = .false. end if -! else -! if (wt_lunit(gi, ltype) > 0.0_r8) then -! this_landunit_exists = .true. -! else -! this_landunit_exists = .false. -! end if else if (urban_landunit_exists(gi, ltype)) then this_landunit_exists = .true. @@ -620,7 +614,7 @@ function urban_landunit_exists(gi, ltype) result(exists) ! !USES: use dynSubgridControlMod , only : get_do_transient_urban use clm_instur , only : pct_urban_max - use clm_varcon , only : isturb_MIN + use landunit_varcon , only : isturb_MIN ! ! !ARGUMENTS: logical :: exists ! function result @@ -628,7 +622,7 @@ function urban_landunit_exists(gi, ltype) result(exists) integer, intent(in) :: ltype !landunit type (isturb_tbd, etc.) ! ! !LOCAL VARIABLES: - integer :: dens_index ! urban density type index + integer :: dens_index ! 
urban density type index character(len=*), parameter :: subname = 'urban_landunit_exists' !----------------------------------------------------------------------- diff --git a/src/main/subgridWeightsMod.F90 b/src/main/subgridWeightsMod.F90 index 596d8880f5..c47ff79db7 100644 --- a/src/main/subgridWeightsMod.F90 +++ b/src/main/subgridWeightsMod.F90 @@ -301,6 +301,7 @@ logical function is_active_l(l, glc_behavior) ! ! !USES: use landunit_varcon, only : istsoil, istice, isturb_MIN, isturb_MAX, istdlak + use clm_instur , only : pct_urban_max ! ! !ARGUMENTS: implicit none @@ -309,6 +310,7 @@ logical function is_active_l(l, glc_behavior) ! ! !LOCAL VARIABLES: integer :: g ! grid cell index + integer :: dens_index ! urban density index !------------------------------------------------------------------------ if (all_active) then @@ -334,17 +336,17 @@ logical function is_active_l(l, glc_behavior) is_active_l = .true. end if -! if ((lun%itype(l) >= isturb_MIN .and. lun%itype(l) <= isturb_MAX) .and. & -! run_zero_weight_urban) then -! is_active_l = .true. -! end if - - ! Set urban land units to active, as long as memory is allocated for such land units. + ! Set urban land units to active, as long as memory has been allocated for such land units, either + ! through the run_zero_weight_urban setting which runs all urban landunits in each grid cell or + ! through pct_urban_max which is the maximum percent urban for each density type in a transient run. ! By doing this, urban land units are also run virtually in grid cells which will grow ! urban during the transient run. - if (lun%itype(l) >= isturb_MIN .and. lun%itype(l) <= isturb_MAX) then - is_active_l = .true. + if ( lun%itype(l) >= isturb_MIN .and. lun%itype(l) <= isturb_MAX ) then + dens_index = lun%itype(l) - isturb_MIN + 1 + if (run_zero_weight_urban .or. pct_urban_max(g,dens_index) > 0._r8) then + is_active_l = .true. + end if end if ! In general, include a virtual natural vegetation landunit. This aids diff --git a/src/main/surfrdMod.F90 b/src/main/surfrdMod.F90 index 72c592ff7f..27da9e24df 100644 --- a/src/main/surfrdMod.F90 +++ b/src/main/surfrdMod.F90 @@ -75,7 +75,7 @@ subroutine surfrd_get_data (begg, endg, ldomain, lfsurdat, actual_numcft) n_dom_landunits use fileutils , only : getfil use domainMod , only : domain_type, domain_init, domain_clean - use clm_instur , only : wt_lunit, topo_glc_mec + use clm_instur , only : wt_lunit, topo_glc_mec, pct_urban_max use landunit_varcon , only : max_lunit, istsoil, isturb_MIN, isturb_MAX use dynSubgridControlMod, only : get_flanduse_timeseries use dynSubgridControlMod, only : get_do_transient_lakes @@ -238,9 +238,13 @@ subroutine surfrd_get_data (begg, endg, ldomain, lfsurdat, actual_numcft) call surfrd_lakemask(begg, endg) end if - ! read the lakemask (necessary for initialization of dynamical urban) + ! read the urbanmask (necessary for initialization of dynamical urban) if (get_do_transient_urban()) then call surfrd_urbanmask(begg, endg) + else + ! Set this to zero here. pct_urban_max is used in subgridWeightsMod to check + ! whether urban landunits should be run virtually. 
+ pct_urban_max(:,:) = 0._r8 end if end subroutine surfrd_get_data From bf21ba8ce8ff5ee4d9956e3c8450572b1f39aed4 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Thu, 24 Feb 2022 21:37:07 -0700 Subject: [PATCH 217/223] Update ChangeLog --- doc/ChangeLog | 58 +++++++++++++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 2 files changed, 59 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index 9abddd0f66..4aa053c9cb 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,62 @@ =============================================================== +Tag name: ctsm5.1.dev081 +Originator(s): swensosc (Sean Swenson) +Date: Thu Feb 24 21:33:35 MST 2022 +One-line Summary: Do not subtract irrigation from QRUNOFF diagnostic + +Purpose and description of changes +---------------------------------- + +Remove code that subtracts surface irrigation flux from qflx_runoff_col. +This is a diagnostic change only. + +This subtraction no longer makes sense now that irrigation is passed as +a separate flux to the ROF model. + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Testing summary: +---------------- + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- OK + izumi ------- OK + +Answer changes +-------------- + +Changes answers relative to baseline: YES, but just in a few diagnostic fields + + Summarize any changes to answers, i.e., + - what code configurations: Crop cases with irrigation + - what platforms/compilers: All + - nature of change (roundoff; larger than roundoff/same climate; new climate): + Larger than roundoff, but only impacts a few diagnostic fields: + QRUNOFF, QRUNOFF_R, QRUNOFF_TO_COUPLER + +Other details +------------- +Pull Requests that document the changes (include PR ids): +https://github.com/ESCOMP/CTSM/pull/1641 + +=============================================================== +=============================================================== Tag name: ctsm5.1.dev080 Originator(s): sacks (Bill Sacks) Date: Thu Feb 24 15:26:02 MST 2022 diff --git a/doc/ChangeSum b/doc/ChangeSum index b389c325f8..146f690647 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.1.dev081 swensosc 02/24/2022 Do not subtract irrigation from QRUNOFF diagnostic ctsm5.1.dev080 sacks 02/24/2022 Use avg days per year when converting param units ctsm5.1.dev079 sacks 02/24/2022 Changes to CropPhenology timing ctsm5.1.dev078 sacks 02/24/2022 Rework single-point testing From f6c1479554e2fd093abbe3c8098922a08c56dc9e Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 25 Feb 2022 11:55:59 -0700 Subject: [PATCH 218/223] Various comments updated; python tests PASS; clm_pymods tests PASS --- doc/ChangeLog | 8 ----- python/ctsm/test/test_sys_fsurdat_modifier.py | 36 +++++++++++++++---- tools/modify_fsurdat/modify_template.cfg | 5 ++- 3 files changed, 34 insertions(+), 15 deletions(-) diff --git a/doc/ChangeLog b/doc/ChangeLog index b116ff2588..87ce73b2ca 
100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -58,14 +58,6 @@ Testing summary: (any machine) - cheyenne PASS - [If python code has changed and you are NOT running aux_clm (e.g., because the only changes are in python - code) then also run the clm_pymods test suite; this is a small subset of aux_clm that runs the system - tests impacted by python changes. The best way to do this, if you expect no changes from the last tag in - either model output or namelists, is: create sym links pointing to the last tag's baseline directory, - named with the upcoming tag; then run the clm_pymods test suite comparing against these baselines but NOT - doing their own baseline generation. If you are already running the full aux_clm then you do NOT need to - separately run the clm_pymods test suite, and you can remove the following line.] - clm_pymods test suite on cheyenne - PASS any other testing (give details below): diff --git a/python/ctsm/test/test_sys_fsurdat_modifier.py b/python/ctsm/test/test_sys_fsurdat_modifier.py index 8603769776..5754269d59 100755 --- a/python/ctsm/test/test_sys_fsurdat_modifier.py +++ b/python/ctsm/test/test_sys_fsurdat_modifier.py @@ -25,6 +25,15 @@ class TestSysFsurdatModifier(unittest.TestCase): """System tests for fsurdat_modifier""" def setUp(self): + """ + Obtain path to the existing: + - modify_template.cfg file + - /testinputs directory and fsurdat_in, located in /testinputs + Make /_tempdir for use by these tests. + Obtain path and names for the files being created in /_tempdir: + - modify_fsurdat.cfg + - fsurdat_out.nc + """ self._cfg_template_path = os.path.join(path_to_ctsm_root(), 'tools/modify_fsurdat/modify_template.cfg') testinputs_path = os.path.join(path_to_ctsm_root(), @@ -44,6 +53,7 @@ def tearDown(self): def test_minimalInfo(self): """ This test specifies a minimal amount of information + Create .cfg file, run the tool, compare fsurdat_in to fsurdat_out """ self._create_config_file_minimal() @@ -61,7 +71,8 @@ def test_minimalInfo(self): def test_crop(self): """ - This version replances the vegetation with a crop + This version replaces the vegetation with a crop + Create .cfg file, run the tool, compare fsurdat_in to fsurdat_out """ self._create_config_file_crop() @@ -77,7 +88,7 @@ def test_crop(self): # assert that fsurdat_out does not equal fsurdat_in self.assertFalse(fsurdat_out_data.equals(fsurdat_in_data)) - # compare fsurdat_out to fsurdat_out_baseline + # compare fsurdat_out to fsurdat_out_baseline located in /testinputs fsurdat_out_baseline = self._fsurdat_in[:-3] + '_modified_with_crop' + \ self._fsurdat_in[-3:] fsurdat_out_base_data = xr.open_dataset(fsurdat_out_baseline) @@ -88,6 +99,7 @@ def test_crop(self): def test_allInfo(self): """ This version specifies all possible information + Create .cfg file, run the tool, compare fsurdat_in to fsurdat_out """ self._create_config_file_complete() @@ -103,7 +115,7 @@ def test_allInfo(self): # assert that fsurdat_out does not equal fsurdat_in self.assertFalse(fsurdat_out_data.equals(fsurdat_in_data)) - # compare fsurdat_out to fsurdat_out_baseline + # compare fsurdat_out to fsurdat_out_baseline located in /testinputs fsurdat_out_baseline = self._fsurdat_in[:-3] + '_modified' + \ self._fsurdat_in[-3:] fsurdat_out_base_data = xr.open_dataset(fsurdat_out_baseline) @@ -112,7 +124,11 @@ def test_allInfo(self): def _create_config_file_minimal(self): - + """ + Open the new and the template .cfg files + Loop line by line through the template .cfg file + When string matches, replace that line's content + """ 
with open (self._cfg_file_path, 'w', encoding='utf-8') as cfg_out: with open (self._cfg_template_path, 'r', encoding='utf-8') as cfg_in: for line in cfg_in: @@ -124,7 +140,11 @@ def _create_config_file_minimal(self): def _create_config_file_crop(self): - + """ + Open the new and the template .cfg files + Loop line by line through the template .cfg file + When string matches, replace that line's content + """ with open (self._cfg_file_path, 'w', encoding='utf-8') as cfg_out: with open (self._cfg_template_path, 'r', encoding='utf-8') as cfg_in: for line in cfg_in: @@ -154,7 +174,11 @@ def _create_config_file_crop(self): def _create_config_file_complete(self): - + """ + Open the new and the template .cfg files + Loop line by line through the template .cfg file + When string matches, replace that line's content + """ with open (self._cfg_file_path, 'w', encoding='utf-8') as cfg_out: with open (self._cfg_template_path, 'r', encoding='utf-8') as cfg_in: for line in cfg_in: diff --git a/tools/modify_fsurdat/modify_template.cfg b/tools/modify_fsurdat/modify_template.cfg index 6b18cedc36..fa134d34e3 100644 --- a/tools/modify_fsurdat/modify_template.cfg +++ b/tools/modify_fsurdat/modify_template.cfg @@ -58,7 +58,10 @@ landmask_file = UNSET # PFT/CFT to be set to 100% according to user-defined mask. # If idealized = True and dom_plant = UNSET, the latter defaults to 0 -# (bare soil). Valid values 0 to 78 (int). +# (bare soil). Valid values range from 0 to a max value (int) that one can +# obtain from the fsurdat_in file using ncdump (or method preferred by user). +# The max valid value will equal (lsmpft - 1) and will also equal the last +# value of cft(cft). dom_plant = UNSET # LAI, SAI, HEIGHT_TOP, and HEIGHT_BOT values by month for dom_plant From 2d9989fece6c6643ebb35740e5f795cfc6f7bae1 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Mon, 28 Feb 2022 10:12:29 -0700 Subject: [PATCH 219/223] Update date on change files --- doc/ChangeLog | 2 +- doc/ChangeSum | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/ChangeLog b/doc/ChangeLog index 87ce73b2ca..2c3a65192a 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,7 +1,7 @@ =============================================================== Tag name: ctsm5.1.dev082 Originator(s): slevis (Samuel Levis,SLevis Consulting,303-665-1310) -Date: Fri Feb 25 10:42:41 MST 2022 +Date: Mon Feb 28 10:12:16 MST 2022 One-line Summary: Replace dom_nat_pft with dom_plant to enable crop in fsurdat_modifier tool Purpose and description of changes diff --git a/doc/ChangeSum b/doc/ChangeSum index 8ab5dc6631..5eaedfc0a6 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,6 +1,6 @@ Tag Who Date Summary ============================================================================================================================ - ctsm5.1.dev082 slevis 02/25/2022 Replace dom_nat_pft with dom_plant to enable crop in fsurdat_modifier tool + ctsm5.1.dev082 slevis 02/28/2022 Replace dom_nat_pft with dom_plant to enable crop in fsurdat_modifier tool ctsm5.1.dev081 swensosc 02/24/2022 Do not subtract irrigation from QRUNOFF diagnostic ctsm5.1.dev080 sacks 02/24/2022 Use avg days per year when converting param units ctsm5.1.dev079 sacks 02/24/2022 Changes to CropPhenology timing From 315a5d86e7a9b01ab16ec67643bd4fe15fac418d Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Thu, 3 Mar 2022 12:22:28 -0700 Subject: [PATCH 220/223] Move file into inputdata I also committed it to the inputdata repository --- .../testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm | 
2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm index dc9e2bf333..69a78ee17d 100644 --- a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm +++ b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm @@ -10,4 +10,4 @@ do_transient_urban = .true. ! Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid. ! Feb 23, 2022: Use updated file with HASURBAN replaced by PCT_URBAN_MAX !flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c220223.nc' -flanduse_timeseries = '/glade/p/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c220223.nc' +flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c220223.nc' From 9afad9852e2e0d130a31e79b8b1d61a4703cc101 Mon Sep 17 00:00:00 2001 From: Keith Oleson Date: Thu, 3 Mar 2022 14:23:48 -0700 Subject: [PATCH 221/223] Make active urban landunits setting in subgridWeightsMod.F90 consistent with memory allocation in subgridMod.F90 --- src/main/subgridWeightsMod.F90 | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/main/subgridWeightsMod.F90 b/src/main/subgridWeightsMod.F90 index c47ff79db7..94c7fec504 100644 --- a/src/main/subgridWeightsMod.F90 +++ b/src/main/subgridWeightsMod.F90 @@ -339,14 +339,12 @@ logical function is_active_l(l, glc_behavior) ! Set urban land units to active, as long as memory has been allocated for such land units, either ! through the run_zero_weight_urban setting which runs all urban landunits in each grid cell or ! through pct_urban_max which is the maximum percent urban for each density type in a transient run. + ! (See subgridMod.F90 for this logic). ! By doing this, urban land units are also run virtually in grid cells which will grow ! urban during the transient run. - if ( lun%itype(l) >= isturb_MIN .and. lun%itype(l) <= isturb_MAX ) then - dens_index = lun%itype(l) - isturb_MIN + 1 - if (run_zero_weight_urban .or. pct_urban_max(g,dens_index) > 0._r8) then - is_active_l = .true. - end if + if ( (lun%itype(l) >= isturb_MIN .and. lun%itype(l) <= isturb_MAX) ) then + is_active_l = .true. end if ! In general, include a virtual natural vegetation landunit. 
This aids From 46d02a2bb94b6b7cf1e2169bb32a482c53e14b20 Mon Sep 17 00:00:00 2001 From: Keith Oleson Date: Tue, 8 Mar 2022 14:12:29 -0700 Subject: [PATCH 222/223] Updates to ChangeLog --- doc/ChangeLog | 67 +++++++++++++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 2 files changed, 68 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index 2c3a65192a..a3a671e3a3 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,71 @@ =============================================================== +Tag name: ctsm5.1.dev083 +Originator(s): fang-bowen (Bowen Fang) / oleson (Keith Oleson,UCAR/TSS,303-497-1332) + / Face2sea (Lei Zhao) / keerzhang1 (Keer Zhang) / sacks (Bill Sacks) +Date: Tue Mar 8 14:12:00 MST 2022 +One-line Summary: Implement PCT_URBAN_MAX to minimize dynamic urban memory + +Purpose and description of changes +---------------------------------- + +Read in 'PCT_URBAN_MAX' from the landuse timeseries file (maximum urban percentage throughout +timeseries) and initialize urban landunits in memory only where PCT_URBAN_MAX is greater than zero. + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed or introduced +------------------------ + +Issues fixed (include CTSM Issue #): #1572 + +Notes of particular relevance for users +--------------------------------------- +[Remove any lines that don't apply. Remove entire section if nothing applies.] 
+
+Notes of particular relevance for developers:
+---------------------------------------------
+NOTE: Be sure to review the steps in README.CHECKLIST.master_tags as well as the coding style in the Developers Guide
+
+Changes to tests or testing:
+ERS_Lm25.1x1_smallvilleIA.IHistClm50BgcCropQianRs.cheyenne_gnu.clm-smallville_dynurban_monthly has
+been updated to accommodate PCT_URBAN_MAX
+
+Testing summary:
+----------------
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ cheyenne ---- PASS
+ izumi ------- PASS
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: NO
+
+Other details
+-------------
+
+Pull Requests that document the changes (include PR ids):
+https://github.com/ESCOMP/ctsm/pull/1661
+
+===============================================================
+===============================================================
 Tag name: ctsm5.1.dev082
 Originator(s): slevis (Samuel Levis,SLevis Consulting,303-665-1310)
 Date: Mon Feb 28 10:12:16 MST 2022
diff --git a/doc/ChangeSum b/doc/ChangeSum
index 5eaedfc0a6..698b5e5b38 100644
--- a/doc/ChangeSum
+++ b/doc/ChangeSum
@@ -1,5 +1,6 @@
 Tag Who Date Summary
 ============================================================================================================================
+