From 0527aa83d48ba98b3a6facb6bc465a35a87dd855 Mon Sep 17 00:00:00 2001
From: Gregory Lemieux
Date: Tue, 20 Aug 2024 12:53:55 -0700
Subject: [PATCH 1/8] add external land use data tool as a submodule to fates

---
 .gitmodules               |   3 +
 tools/landuse             |   1 +
 tools/luh2/README.md      |  55 ---------
 tools/luh2/conda-luh2.yml |  11 --
 tools/luh2/luh2.py        | 146 ----------------------
 tools/luh2/luh2.sh        |  57 ---------
 tools/luh2/luh2mod.py     | 254 --------------------------------------
 7 files changed, 4 insertions(+), 523 deletions(-)
 create mode 100644 .gitmodules
 create mode 160000 tools/landuse
 delete mode 100644 tools/luh2/README.md
 delete mode 100644 tools/luh2/conda-luh2.yml
 delete mode 100644 tools/luh2/luh2.py
 delete mode 100755 tools/luh2/luh2.sh
 delete mode 100644 tools/luh2/luh2mod.py

diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000000..aec4197a72
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "tools/landuse"]
+	path = tools/landuse
+	url = https://github.com/NGEET/tools-fates-landusedata
diff --git a/tools/landuse b/tools/landuse
new file mode 160000
index 0000000000..88108f3b26
--- /dev/null
+++ b/tools/landuse
@@ -0,0 +1 @@
+Subproject commit 88108f3b2617c30d2f1f59a587069116b9ffdc06
diff --git a/tools/luh2/README.md b/tools/luh2/README.md
deleted file mode 100644
index 60305c7ad1..0000000000
--- a/tools/luh2/README.md
+++ /dev/null
@@ -1,55 +0,0 @@
-# FATES LUH2 data tool README
-
-## Purpose
-
-This tool takes the raw Land Use Harmonization (https://luh.umd.edu/), or LUH2, data files as
-input and prepares them for use with FATES. The tool concatenates the various raw data sets into
-a single file and provides the ability to regrid the source data resolution to a target
-resolution that the user designates. The output data is then usable by FATES, mediated through
-a host land model (currently either CTSM or E3SM).
-
-For more information on how FATES utilizes this information see https://github.com/NGEET/fates/pull/1040.
-
-## Installation
-
-This tool requires the use of conda with python3. See https://docs.conda.io/en/latest/miniconda.html#installing
-for information on installing conda on your system. To install the conda environment necessary to run the tool,
-execute the following command:
-
-conda env create -f conda-luh2.yml
-
-This will create a conda environment named "luh2". To activate this environment run:
-
-conda activate luh2
-
-For more information on creating conda environments see
-https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#creating-an-environment-from-an-environment-yml-file
-
-Note that it is planned for a subset of host land models (HLMs) and HLM-supported machines to incorporate this tool into the surface dataset workflow.
-As such, if you are working on one of these machines, the output from this tool may be precomputed and available for the grid resolution of interest.
-
-## Usage
-
-After activating the "luh2" environment the tool can be run from the command line with the following minimum required inputs:
-
-python luh2.py -l <raw-luh2-datafile> -s <luh2-static-datafile> -r <regrid-targetfile> -w <regridder-output> -o <outputfile>
-
-The minimum required input arguments are as follows:
-- raw-luh2-datafile: one of the three raw luh2 datafiles: states, transitions, or management. This is the data to be regridded and used by FATES.
-- luh2-static-datafile: supplementary 0.25 deg resolution static data used in the construction of the raw luh2 datafiles. This is used to help set the gridcell mask for the output file.
-- regrid-targetfile: host land model surface data file intended to be used in conjunction with the fates run at a specific grid resolution. This is used as the regridder target resolution.
-- regridder-output: the path and filename to which the regridding weights file is written, or from which an existing weights file is read.
-- outputfile: the path and filename to which the output is written.
-
-The tool is intended to be run three times, sequentially, to concatenate the raw states, transitions, and management data into a single file. After the first run of
-the tool, a merge option should also be included in the argument list, pointing to the most recent output file. This ensures that the previous regridding run
-is merged into the current run and that the previously written regridding weights file is reused (to help reduce duplicate computation).
-The luh2.sh file in this directory provides an example shell script that uses the python tool in this sequential manner (a sketch of the same workflow follows this README). The python tool itself provides additional
-help by passing the `--help` option argument to the command line call.
-
-## Description of directory contents
-
-- luh2.py: main luh2 python script
-- luh2mod.py: python module source file for the functions called in luh2.py
-- luh2.sh: example bash shell script file demonstrating how to call luh2.py
-- conda-luh2.yml: conda environment yaml file which defines the minimum set of package dependencies for luh2.py
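The Usage section above describes the three-pass workflow in prose; the following is a minimal sketch of it, assuming the raw LUH2 filenames used by luh2.sh later in this patch and a hypothetical 4x5 surface dataset named surfdata_4x5.nc (adjust all paths to your own data):

conda activate luh2
python luh2.py -l states.nc -s staticData_quarterdeg.nc -r surfdata_4x5.nc -w regridder.nc -o states_regrid.nc
python luh2.py -l transitions.nc -s staticData_quarterdeg.nc -r surfdata_4x5.nc -w regridder.nc -m states_regrid.nc -o states_trans_regrid.nc
python luh2.py -l management.nc -s staticData_quarterdeg.nc -r surfdata_4x5.nc -w regridder.nc -m states_trans_regrid.nc -o LUH2_timeseries.nc

Each pass after the first merges the prior output via -m and reuses the regridder.nc weights, so only the first pass pays the cost of computing the regridding weights.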
diff --git a/tools/luh2/conda-luh2.yml b/tools/luh2/conda-luh2.yml
deleted file mode 100644
index 12d4a35c65..0000000000
--- a/tools/luh2/conda-luh2.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-# This yaml file is intended for users who wish to utilize the luh2.py tool on their own machines.
-# The file is not yet tested regularly to determine if the latest versions of the dependencies will
-# always work. This regular testing is expected to be implemented in the future.
-name: luh2
-channels:
-  - conda-forge
-  - defaults
-dependencies:
-  - xesmf
-  # xarray, which is auto-downloaded as an xesmf dependency, uses scipy, which needs netcdf4 to open datasets
-  - netcdf4
diff --git a/tools/luh2/luh2.py b/tools/luh2/luh2.py
deleted file mode 100644
index ec111c88f6..0000000000
--- a/tools/luh2/luh2.py
+++ /dev/null
@@ -1,146 +0,0 @@
-#!/usr/bin/env python3
-
-# LUH2 python script
-# Usage: python luh2.py -l <raw-luh2-datafile> -s <luh2-static-datafile> \
-#        -r <regrid-targetfile> -w <regridder-output> -o <outputfile>
-
-import argparse, os, sys
-from luh2mod import ImportData, SetMaskLUH2, SetMaskSurfData
-from luh2mod import RegridConservative, RegridLoop, CorrectStateSum
-
-# Add version checking here in case environment.yml not used
-def main():
-
-    # Add argument parser - subfunction? Separate common module?
-    # input_files and range should be the only arguments
-    # Allow variable input files (state and/or transitions and/or management)
-    args = CommandLineArgs()
-
-    # Import and prep the LUH2 datasets and regrid target
-    ds_luh2 = ImportData(args.luh2_file,args.begin,args.end)
-    ds_regrid_target = ImportData(args.regridder_target_file,args.begin,args.end)
-
-    # Import the LUH2 static data to use for masking
-    ds_luh2_static = ImportData(args.luh2_static_file)
-
-    # Create a new variable where the ice/water fraction is inverted
-    ds_luh2_static["landfrac"] = 1 - ds_luh2_static.icwtr
-
-    # Mask all LUH2 input data using the ice/water fraction from the LUH2 static data
-    ds_luh2 = SetMaskLUH2(ds_luh2, ds_luh2_static)
-    ds_luh2_static = SetMaskLUH2(ds_luh2_static, ds_luh2_static)
-
-    # Mask the regrid target
-    ds_regrid_target = SetMaskSurfData(ds_regrid_target)
-
-    # Determine if we are saving a new regridder or using an old one
-    # TO DO: add check to handle if the user enters the full path
-    # TO DO: check if it's possible to enter nothing with the argument
-    regrid_reuse = False
-    # If we are merging files together, we assume that the weights file
-    # being supplied already exists on file
-    if (not isinstance(args.luh2_merge_file,type(None))):
-        regrid_reuse = True
-
-    # Regrid the luh2 data to the target grid
-    # TO DO: provide a check for the save argument based on the input arguments
-    regrid_luh2,regridder_luh2 = RegridConservative(ds_luh2, ds_regrid_target,
-                                                    args.regridder_weights, regrid_reuse)
-
-    # Regrid the inverted ice/water fraction data to the target grid
-    regrid_land_fraction = regridder_luh2(ds_luh2_static)
-
-    # Adjust the luh2 data by the land fraction
-    # TO DO: determine if this is necessary for the transitions and management data
-    regrid_luh2 = regrid_luh2 / regrid_land_fraction.landfrac
-
-    # Correct the state sum (the function checks whether the file passed in is a state file)
-    regrid_luh2 = CorrectStateSum(regrid_luh2)
-
-    # Add additional required variables for the host land model
-    # Add 'YEAR' as a variable.
-    # If we are merging, we might not need to do this, so check to see if it's there already.
-    # This is a requirement of the HLM dyn_subgrid module and should be the actual year.
-    # Note that the time variable from the LUH2 data is 'years since ...', so we need to
-    # add the input data year
-    if (not "YEAR" in list(regrid_luh2.variables)):
-        regrid_luh2["YEAR"] = regrid_luh2.time + ds_luh2.timesince
-        regrid_luh2["LONGXY"] = ds_regrid_target["LONGXY"] # TO DO: double check if this is strictly necessary
-        regrid_luh2["LATIXY"] = ds_regrid_target["LATIXY"] # TO DO: double check if this is strictly necessary
-
-    # Rename the dimensions for the output. This needs to happen after the "LONGXY/LATIXY" assignment.
-    if (not 'lsmlat' in list(regrid_luh2.dims)):
-        regrid_luh2 = regrid_luh2.rename_dims({'lat':'lsmlat','lon':'lsmlon'})
-
-    # Reapply the coordinate attributes. This is a workaround for an xarray bug (#8047).
-    # Currently only time is needed.
-    regrid_luh2.time.attrs = ds_luh2.time.attrs
-    regrid_luh2.lat.attrs = ds_luh2.lat.attrs
-    regrid_luh2.lon.attrs = ds_luh2.lon.attrs
-
-    # Merge the existing regridded luh2 file with the merge input target
-    # TO DO: check that the grid resolutions match
-    # We could do this with an append during the write phase instead of the merge
-    if (not(isinstance(args.luh2_merge_file,type(None)))):
-        ds_luh2_merge = ImportData(args.luh2_merge_file,args.begin,args.end,merge_flag=True)
-        #ds_luh2_merge = ds_luh2_merge.merge(regrid_luh2)
-        regrid_luh2 = regrid_luh2.merge(ds_luh2_merge)
-
-    # Write the files
-    # TO DO: add check to handle if the user enters the full path
-    output_file = os.path.join(os.getcwd(),args.output)
-    print("generating output: {}".format(output_file))
-    regrid_luh2.to_netcdf(output_file)
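# Hedged aside: the 'YEAR' arithmetic above in self-contained form. The 0850
# reference year matches the LUH2 historical files, but the values are illustrative.
import numpy as np
import xarray as xr

time = xr.DataArray(np.array([1000.0, 1001.0]), dims="time",
                    attrs={"units": "years since 0850-01-01 0:0:0"})

# Same parsing as PrepDataset in luh2mod.py: 'years since YYYY-MM-DD ...' -> YYYY
time_since = int(time.attrs["units"].split()[2].split("-")[0])

print((time + time_since).values)  # -> [1850. 1851.], the actual calendar years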
-def CommandLineArgs():
-
-    parser = argparse.ArgumentParser(description="placeholder desc")
-
-    # Required input luh2 datafile
-    # TO DO: use the checking function to report back if an invalid file is input
-    parser.add_argument("-l","--luh2_file",
-                        required=True,
-                        help = "luh2 raw states, transitions, or management data file")
-
-    # Required static luh2 data to get the ice/water fraction for masking
-    parser.add_argument("-s", "--luh2_static_file",
-                        required=True,
-                        help = "luh2 static data file")
-
-    # File to use as regridder target (e.g. a surface dataset)
-    parser.add_argument("-r","--regridder_target_file",
-                        required=True,
-                        help = "target file with desired resolution to regrid luh2 data to")
-
-    # Filename to use or save for the regridder weights
-    parser.add_argument("-w", "--regridder_weights",
-                        default = 'regridder.nc',
-                        help = "filename of regridder weights to write to or reuse (if -m option used)")
-
-    # Optional input to subset the time range of the data
-    # TODO: add support for parsing the input and checking against the allowable date range
-    parser.add_argument("-b","--begin",
-                        type = int,
-                        default = None,
-                        help = "beginning of date range of interest")
-    parser.add_argument("-e","--end",
-                        type = int,
-                        default = None,
-                        help = "ending of date range to slice")
-
-    # Optional output argument
-    parser.add_argument("-o","--output",
-                        default = 'LUH2_timeseries.nc',
-                        help = "output filename")
-
-    # Optional merge argument to enable merging of other files
-    parser.add_argument("-m", "--luh2_merge_file",
-                        default = None,
-                        help = "previous luh2 output filename to merge into current run output")
-
-    args = parser.parse_args()
-
-    return(args)
-
-if __name__ == "__main__":
-    main()
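As a quick, hedged sanity check after a full run of the tool above (the filename is the -o default from CommandLineArgs; the variable list depends on which raw files were merged):

import xarray as xr

# decode_times=False mirrors ImportData, since the LUH2 time axis is non-standard
ds = xr.open_dataset("LUH2_timeseries.nc", decode_times=False)
print(list(ds.data_vars))     # regridded LUH2 variables plus YEAR and stscf
print(ds["YEAR"].values[:5])  # should be actual calendar years, e.g. 1850, 1851, ...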
diff --git a/tools/luh2/luh2.sh b/tools/luh2/luh2.sh
deleted file mode 100755
index 3aa246907d..0000000000
--- a/tools/luh2/luh2.sh
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/bash
-# WARNING: This script generates intermediate copies of the LUH2
-# data which at their peak take up approximately 42G of space.
-#
-# Note that this script must be run with the luh2 conda environment.
-# It requires four arguments that point to the full path locations of the
-# luh2 data, the static data, the dataset to regrid against, and the output location.

-# LUH2 data names
-DATA_LOC=$1
-STATIC_LOC=$2
-TARGET_LOC=$3
-OUTPUT_LOC=$4
-STATES_FILE=states.nc
-TRANSITIONS_FILE=transitions.nc
-MANAGE_FILE=management.nc
-STATIC_FILE=staticData_quarterdeg.nc
-REGRID_TARGET_FILE=surfdata_4x5_16pfts_Irrig_CMIP6_simyr2000_c170824.nc
-
-START=1850
-END=2015
-
-# Save files
-REGRID_SAVE=regridder.nc
-OUTPUT_FILE=LUH2_historical_0850_2015_4x5.nc
-
-# Combine strings
-STATES=${DATA_LOC}/${STATES_FILE}
-TRANSITIONS=${DATA_LOC}/${TRANSITIONS_FILE}
-MANAGE=${DATA_LOC}/${MANAGE_FILE}
-STATIC=${STATIC_LOC}/${STATIC_FILE}
-REGRID_TARGET=${TARGET_LOC}/${REGRID_TARGET_FILE}
-REGRIDDER=${OUTPUT_LOC}/${REGRID_SAVE}
-
-# Comment this out if the user already has the modified datasets available
-
-# Regrid the luh2 states data against a target surface data set
-echo "starting storage"
-du -h ${OUTPUT_LOC}
-python luh2.py -b ${START} -e ${END} -l ${STATES} -s ${STATIC} -r ${REGRID_TARGET} -w ${REGRIDDER} -o ${OUTPUT_LOC}/states_regrid.nc
-echo -e "storage status:\n"
-du -h ${OUTPUT_LOC}
-
-# Regrid the luh2 transitions data using the saved regridder weights file and merge into the previous regrid output
-python luh2.py -b ${START} -e ${END} -l ${TRANSITIONS} -s ${STATIC} -r ${REGRID_TARGET} -w ${REGRIDDER} \
-       -m ${OUTPUT_LOC}/states_regrid.nc -o ${OUTPUT_LOC}/states_trans_regrid.nc
-echo -e "storage status:\n"
-du -h ${OUTPUT_LOC}
-rm ${OUTPUT_LOC}/states_regrid.nc
-
-# Regrid the luh2 management data using the saved regridder file and merge into the previous regrid output
-python luh2.py -b ${START} -e ${END} -l ${MANAGE} -s ${STATIC} -r ${REGRID_TARGET} -w ${REGRIDDER} \
-       -m ${OUTPUT_LOC}/states_trans_regrid.nc -o ${OUTPUT_LOC}/${OUTPUT_FILE}
-echo -e "storage status:\n"
-du -h ${OUTPUT_LOC}
-rm ${OUTPUT_LOC}/states_trans_regrid.nc
-rm ${REGRIDDER}
diff --git a/tools/luh2/luh2mod.py b/tools/luh2/luh2mod.py
deleted file mode 100644
index 801baa96fc..0000000000
--- a/tools/luh2/luh2mod.py
+++ /dev/null
@@ -1,254 +0,0 @@
-#!/usr/bin/env python3
-
-import re, sys
-import numpy as np
-import xarray as xr
-import xesmf as xe
-
-# Import luh2 or surface data sets
-def ImportData(input_file,start=None,stop=None,merge_flag=False):
-
-    # Open files
-    # Set decode_times to false as the luh2 raw data is outside the range
-    # of the standard NetCDF datetime format.
-    datasetout = xr.open_dataset(input_file, cache=False, decode_times=False)
-    print("Input file dataset opened: {}".format(input_file))
-
-    # Prep the input data for use
-    datasetout = PrepDataset(datasetout,start,stop,merge_flag)
-
-    return(datasetout)
-
-# Prepare the input_file to be used for regridding
-def PrepDataset(input_dataset,start=None,stop=None,merge_flag=False):
-
-    # Check that the input dataset is a valid type
-    dsflag, dstype = CheckDataset(input_dataset)
-
-    # Use the maximum span if start and stop are not present.
-    # This assumes that the luh2 raw data will always use a
-    # 'years since' style format.
-    if(not(dstype in ('static','regrid'))):
-
-        if ('LUH2' in dstype):
-            # Get the units to determine the file time.
-            # It is expected that the units of time are 'years since ...'
-            time_since_array = input_dataset.time.units.split()
-            if (time_since_array[0] != 'years'):
-                sys.exit("FileTimeUnitsError: input file units of time are not 'years since ...'")
-
-            # Note that the datetime package is not used, as the date range might
-            # be beyond the applicable bounds of the package
-            time_since = int(time_since_array[2].split('-')[0])
-
-            # Get the time bounds of the input file
-            start_bound = input_dataset.time.values[0]
-            stop_bound = input_dataset.time.values[-1]
-
-            # If no input provided, simply get the bounds of the time
-            if (isinstance(start,type(None))):
-                start = start_bound + time_since
-
-            if (isinstance(stop,type(None))):
-                stop = stop_bound + time_since
-
-            # Convert the input dates to years since 0850
-            years_since_start = start - time_since
-            years_since_stop = stop - time_since
-
-            # Abort if the times provided are outside the applicable range
-            if (years_since_start < start_bound or years_since_stop < start_bound or
-                years_since_start > stop_bound or years_since_stop > stop_bound):
-                sys.exit("StartStopBoundError: the input start or stop date is outside the applicable range of {} to {}".format(time_since+start_bound,time_since+stop_bound))
-
-            # Truncate the data to the user defined range.
-            # This might need some more error handling for when
-            # the start/stop is out of range
-            input_dataset = input_dataset.sel(time=slice(years_since_start,years_since_stop))
-
-            # Save the timesince as a variable for future use
-            input_dataset["timesince"] = time_since
-
-    # Correct the necessary variables for both datasets.
-    # We don't need to prep the incoming dataset if it's being opened to merge
-    if(not merge_flag):
-        input_dataset = PrepDataset_ESMF(input_dataset,dsflag,dstype)
-
-    return(input_dataset)
-
-# Update datasets to work with xESMF
-def PrepDataset_ESMF(input_dataset,dsflag,dstype):
-
-    if (dsflag):
-        if("LUH2" in dstype):
-            print("PrepDataset: LUH2")
-            input_dataset = BoundsVariableFixLUH2(input_dataset)
-        elif(dstype == "surface"):
-            print("PrepDataset: SurfData")
-            input_dataset = DimensionFixSurfData(input_dataset)
-        print("data set updated for xESMF\n")
-
-    return(input_dataset)
-
-# Create the necessary variables "lat_b" and "lon_b" for xESMF conservative regridding.
-# Each lat/lon boundary array is a 2D array corresponding to the bounds of each
-# coordinate position (e.g. the lat boundaries would be 90.0 and 89.75 for a lat coordinate
-# of 89.875).
-def BoundsVariableFixLUH2(input_dataset):
-
-    # Create lat and lon bounds as a single dimension array out of the LUH2 two-dimensional bounds array.
-    # Future todo: is it possible to have xESMF recognize and use the original 2D array?
-    input_dataset["lat_b"] = np.insert(input_dataset.lat_bounds[:,1].data,0,input_dataset.lat_bounds[0,0].data)
-    input_dataset["lon_b"] = np.insert(input_dataset.lon_bounds[:,1].data,0,input_dataset.lon_bounds[0,0].data)
-
-    # Drop the old boundary names to avoid confusion
-    input_dataset = input_dataset.drop(labels=['lat_bounds','lon_bounds'])
-
-    print("LUH2 dataset lat/lon boundary variables formatted and added as new variables for xESMF")
-
-    return(input_dataset)
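# Hedged aside: what the np.insert recipe above produces, using a fake
# three-cell latitude axis (illustrative values, not real LUH2 data).
import numpy as np

lat_bounds = np.array([[90.0, 89.75],
                       [89.75, 89.5],
                       [89.5, 89.25]])  # each row holds the two edges of one cell

# Take the trailing edge of every cell and prepend the leading edge of the
# first cell, yielding the N+1 edge vector ("lat_b") that xESMF expects.
lat_b = np.insert(lat_bounds[:, 1], 0, lat_bounds[0, 0])
print(lat_b)  # [90.   89.75 89.5  89.25]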
-# The user will need to use a surface data set to regrid from, but the surface datasets
-# need to have their dimensions renamed to something recognizable by xESMF
-def DimensionFixSurfData(input_dataset):
-
-    # Rename the surface dataset dimensions to something recognizable by xESMF
-    input_dataset = input_dataset.rename_dims(dims_dict={'lsmlat':'lat','lsmlon':'lon'})
-
-    # Populate the new surface dataset with the actual lat/lon values
-    input_dataset['lon'] = input_dataset.LONGXY.isel(lat=0)
-    input_dataset['lat'] = input_dataset.LATIXY.isel(lon=0)
-
-    print("Surface dataset dimensions renamed for xESMF")
-
-    return(input_dataset)
-
-# LUH2 specific masking sub-function
-def SetMaskLUH2(input_dataset,static_data_set):
-
-    # Mask the luh2 data where the ice/water fraction is unity (i.e. a fully ice-covered gridcell)
-    input_dataset["mask"] = (static_data_set.icwtr != 1)
-    return(input_dataset)
-
-# Surface dataset specific masking sub-function
-def SetMaskSurfData(input_dataset):
-    # Instead of passing the label_to_mask, loop through this for all labels?
-    input_dataset["mask"] = input_dataset["PCT_NATVEG"] > 0
-    return(input_dataset)
-
-# Check which dataset we're working with
-def CheckDataset(input_dataset):
-
-    dsflag = False
-    dsvars = list(input_dataset.variables)
-    if(any('primf' in subname for subname in dsvars) or
-       any('irrig' in subname for subname in dsvars)):
-        if ('primf_to_secdn' in dsvars):
-            dstype = 'LUH2_transitions'
-        else:
-            dstype = 'LUH2'
-
-        dsflag = True
-        # print("LUH2")
-    elif('natpft' in dsvars):
-        dstype = 'surface'
-        dsflag = True
-        # print("Surface")
-    elif('icwtr' in dsvars):
-        dstype = 'static'
-        dsflag = True
-    elif('col' in dsvars):
-        dstype = 'regrid'
-        dsflag = True
-    else:
-        dstype = 'Unknown'
-        sys.exit("CheckDataSetError: Unrecognized data set")
-
-    return(dsflag,dstype)
-
-def RegridConservative(ds_to_regrid, ds_regrid_target, regridder_weights, regrid_reuse):
-
-    # Define the regridder transformation
-    regridder = GenerateRegridder(ds_to_regrid, ds_regrid_target, regridder_weights, regrid_reuse)
-
-    # Loop through the variables to regrid
-    ds_regrid = RegridLoop(ds_to_regrid, regridder)
-
-    return (ds_regrid, regridder)
-
-def GenerateRegridder(ds_to_regrid, ds_regrid_target, regridder_weights_file, regrid_reuse):
-
-    regrid_method = "conservative"
-    print("\nDefining regridder, method: ", regrid_method)
-
-    if (regrid_reuse):
-        regridder = xe.Regridder(ds_to_regrid, ds_regrid_target,
-                                 regrid_method, weights=regridder_weights_file)
-    else:
-        regridder = xe.Regridder(ds_to_regrid, ds_regrid_target, regrid_method)
-
-        # If we are not reusing the regridder weights file, then save the regridder
-        filename = regridder.to_netcdf(regridder_weights_file)
-        print("regridder saved to file: ", filename)
-
-    return(regridder)
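# Hedged aside: the xe.Regridder call pattern above in self-contained form,
# using two toy global grids (roughly 1 degree -> 4x5 degrees; all grid values
# are illustrative, not the LUH2 or surface-dataset grids).
import numpy as np
import xarray as xr
import xesmf as xe

# Cell centers plus the cell-edge coordinates (lat_b/lon_b) that the
# conservative method requires.
grid_in = {"lat": np.arange(-89.5, 90, 1.0), "lon": np.arange(0.5, 360, 1.0),
           "lat_b": np.arange(-90, 91, 1.0), "lon_b": np.arange(0, 361, 1.0)}
grid_out = {"lat": np.arange(-88, 90, 4.0), "lon": np.arange(2.5, 360, 5.0),
            "lat_b": np.arange(-90, 92, 4.0), "lon_b": np.arange(0, 361, 5.0)}

regridder = xe.Regridder(grid_in, grid_out, "conservative")

# A constant field should pass through conservative regridding unchanged.
field = xr.DataArray(np.ones((180, 360)), dims=("lat", "lon"),
                     coords={"lat": grid_in["lat"], "lon": grid_in["lon"]})
print(regridder(field).shape)  # (45, 72)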
-def RegridLoop(ds_to_regrid, regridder):
-
-    # To Do: implement this with dask
-    print("\nRegridding")
-
-    # Loop through the variables one at a time to conserve memory
-    ds_varnames = list(ds_to_regrid.variables.keys())
-    varlen = len(ds_to_regrid.variables)
-    first_var = False
-    for i in range(varlen-1):
-
-        # Skip the time variable
-        if (not "time" in ds_varnames[i]):
-
-            # Only regrid variables that match the lat/lon shape.
-            if (ds_to_regrid[ds_varnames[i]][0].shape == (ds_to_regrid.lat.shape[0], ds_to_regrid.lon.shape[0])):
-                print("regridding variable {}/{}: {}".format(i+1, varlen, ds_varnames[i]))
-
-                # For the first non-coordinate variable, copy and regrid the dataset as a whole.
-                # This makes sure to correctly include the lat/lon in the regridding.
-                if (not(first_var)):
-                    ds_regrid = ds_to_regrid[ds_varnames[i]].to_dataset() # convert data array to dataset
-                    ds_regrid = regridder(ds_regrid)
-                    first_var = True
-
-                # Once the first variable has been included, we can regrid variable by variable
-                else:
-                    ds_regrid[ds_varnames[i]] = regridder(ds_to_regrid[ds_varnames[i]])
-            else:
-                print("skipping variable {}/{}: {}".format(i+1, varlen, ds_varnames[i]))
-        else:
-            print("skipping variable {}/{}: {}".format(i+1, varlen, ds_varnames[i]))
-
-    print("\n")
-    return(ds_regrid)
-
-# Temporary: Add a minor correction factor to assure the states sum to one
-def CorrectStateSum(input_dataset):
-
-    # Only calculate the state-sum-to-unity correction for the appropriate dataset
-    # TO DO: Update this to use the check function
-    if (not(any('irrig' in var for var in input_dataset) or
-            any('_to_' in var for var in input_dataset))):
-
-        # Drop the secma and secmb variables temporarily
-        temp_dataset = input_dataset.drop({'secma','secmb'})
-
-        # Sum the remaining state variables and normalize
-        state_sum = temp_dataset.to_array().sum(dim='variable')
-        state_sum = state_sum.where(state_sum != 0)
-        temp_dataset = temp_dataset / state_sum
-
-        # Update the dataset with the new scaled values
-        input_dataset.update(temp_dataset)
-
-        # Save the correction value
-        input_dataset["stscf"] = 1.0 / state_sum
-
-    return(input_dataset)

From 0fdbe1669f579770c9a519cb320704570b2079c7 Mon Sep 17 00:00:00 2001
From: Gregory Lemieux
Date: Tue, 20 Aug 2024 14:08:25 -0700
Subject: [PATCH 2/8] rename the land use data tool directory

---
 .gitmodules                    | 2 +-
 tools/{landuse => landusedata} | 0
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename tools/{landuse => landusedata} (100%)

diff --git a/.gitmodules b/.gitmodules
index aec4197a72..2ede6ae5bc 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,3 @@
 [submodule "tools/landuse"]
-	path = tools/landuse
+	path = tools/landusedata
 	url = https://github.com/NGEET/tools-fates-landusedata
diff --git a/tools/landuse b/tools/landusedata
similarity index 100%
rename from tools/landuse
rename to tools/landusedata

From 6fe16854e2ada75533b3ae59611d3b7428b2427a Mon Sep 17 00:00:00 2001
From: Gregory Lemieux
Date: Tue, 20 Aug 2024 14:12:23 -0700
Subject: [PATCH 3/8] update submodule name in .gitmodules

---
 .gitmodules | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitmodules b/.gitmodules
index 2ede6ae5bc..d5ce9c594e 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,3 @@
-[submodule "tools/landuse"]
+[submodule "tools/landusedata"]
 	path = tools/landusedata
 	url = https://github.com/NGEET/tools-fates-landusedata

From 588a9805efb919199a34eca0123fa6869ba530b9 Mon Sep 17 00:00:00 2001
From: Gregory Lemieux
Date: Wed, 21 Aug 2024 09:12:50 -0700
Subject: [PATCH 4/8] add fx type variables for git-fleximod usability

---
 .gitmodules | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.gitmodules b/.gitmodules
index d5ce9c594e..2b06f80735 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,6 @@
 [submodule "tools/landusedata"]
 	path = tools/landusedata
 	url = https://github.com/NGEET/tools-fates-landusedata
+	fxrequired = AlwaysRequired
+	# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+	fxDONOTUSEurl = https://github.com/NGEET/tools-fates-landusedata
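The fx* keys introduced above are not read by stock git; they are consumed by git-fleximod (https://github.com/ESMCI/git-fleximod), which CESM uses to manage submodules. A hedged sketch of the basic commands, run from the top level of a checkout that provides git-fleximod:

git fleximod status   # report each submodule's state relative to its fx* entries
git fleximod update   # check out required submodules (at the fxtag added in the next patch)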
From e65a3a9b48dffb7930b916515dc0d4ce49bd12f6 Mon Sep 17 00:00:00 2001
From: Gregory Lemieux
Date: Tue, 29 Oct 2024 13:23:07 -0700
Subject: [PATCH 5/8] add tag

---
 .gitmodules | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitmodules b/.gitmodules
index 2b06f80735..35bd8c37ed 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -4,3 +4,4 @@
 	fxrequired = AlwaysRequired
 	# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
 	fxDONOTUSEurl = https://github.com/NGEET/tools-fates-landusedata
+	fxtag = v0.1.1

From 58b9e81a369d4eb75b7e077580644ad23e72a6d1 Mon Sep 17 00:00:00 2001
From: Gregory Lemieux
Date: Tue, 29 Oct 2024 13:31:19 -0700
Subject: [PATCH 6/8] update landuse tool version

---
 tools/landusedata | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/landusedata b/tools/landusedata
index 88108f3b26..5b35732921 160000
--- a/tools/landusedata
+++ b/tools/landusedata
@@ -1 +1 @@
-Subproject commit 88108f3b2617c30d2f1f59a587069116b9ffdc06
+Subproject commit 5b357329214ac3b647daa3d471fb9fac9c116a54

From 710ae0d3411819eeef94111c94b681cd846f084b Mon Sep 17 00:00:00 2001
From: Gregory Lemieux
Date: Tue, 29 Oct 2024 13:36:56 -0700
Subject: [PATCH 7/8] minor format update

---
 .gitmodules | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitmodules b/.gitmodules
index 35bd8c37ed..63e597cb70 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,7 +1,7 @@
 [submodule "tools/landusedata"]
 	path = tools/landusedata
 	url = https://github.com/NGEET/tools-fates-landusedata
+	fxtag = v0.1.1
 	fxrequired = AlwaysRequired
 	# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
 	fxDONOTUSEurl = https://github.com/NGEET/tools-fates-landusedata
-	fxtag = v0.1.1

From 464a7ec3da81e164fa4159d9288f9ac37bf471fb Mon Sep 17 00:00:00 2001
From: Gregory Lemieux
Date: Tue, 29 Oct 2024 13:40:23 -0700
Subject: [PATCH 8/8] add truncated version of suggested header comments to .gitmodules

---
 .gitmodules | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/.gitmodules b/.gitmodules
index 63e597cb70..7dc4d3c410 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,9 @@
+# This is a git submodule file to include additional submodules when FATES is checked out.
+#
+# It includes optional additional support for
+# git-fleximod (https://github.com/ESMCI/git-fleximod)
+# that will be used when git-fleximod is used to check it out (i.e. for CESM)
+#
 [submodule "tools/landusedata"]
 	path = tools/landusedata
 	url = https://github.com/NGEET/tools-fates-landusedata
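With the series applied, the submodule can also be fetched with stock git; a minimal sketch, assuming a clone of the NGEET/fates repository that this series targets:

git clone --recurse-submodules https://github.com/NGEET/fates
# or, from within an existing clone:
git submodule update --init tools/landusedata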