diff --git a/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh b/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh index 257cd89129..2183ead891 100755 --- a/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh +++ b/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh @@ -759,7 +759,7 @@ This is probably because it is a directory. Please correct and rerun." test_names=("${prim_test_names[@]}") test_subdirs=("${prim_test_subdirs[@]}") if [ "${num_alt_tests}" -gt "0" ]; then - test_names+=("${alt_test_subdirs[@]:-}") + test_names+=("${alt_test_names[@]:-}") test_subdirs+=("${alt_test_subdirs[@]:-}") fi # @@ -1025,7 +1025,7 @@ Please correct and rerun." # listed first. # # Finally, we extract from test_ids_and_inds_sorted the second number -# in each element (the one afte the first number, which is the test ID, +# in each element (the one after the first number, which is the test ID, # and the test type, which we no longer need), which is the original # array index before sorting, and save the results in the array sort_inds. # This array will contain the original indices in sorted order that we diff --git a/tests/WE2E/get_expts_status.sh b/tests/WE2E/get_expts_status.sh index 5c05acafb1..997bfb6b8f 100755 --- a/tests/WE2E/get_expts_status.sh +++ b/tests/WE2E/get_expts_status.sh @@ -50,7 +50,7 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -homerrfs=${scrfunc_dir%/*} +homerrfs=${scrfunc_dir%/*/*} # #----------------------------------------------------------------------- # diff --git a/tests/WE2E/run_WE2E_tests.sh b/tests/WE2E/run_WE2E_tests.sh index b8eaeec74f..dd1f506133 100755 --- a/tests/WE2E/run_WE2E_tests.sh +++ b/tests/WE2E/run_WE2E_tests.sh @@ -538,8 +538,8 @@ accordingly and rerun." 
if [ "${match_found}" = "FALSE" ]; then avail_WE2E_test_names_str=$( printf " \"%s\"\n" "${avail_WE2E_test_names[@]}" ) print_err_msg_exit "\ -The name current user-specified test to run (user_spec_test) does not -match any of the names (either primary or alternate) of the available +The name of the current user-specified test to run (user_spec_test) does +not match any of the names (either primary or alternate) of the available WE2E tests: user_spec_test = \"${user_spec_test}\" Valid values for user_spec_test consist of the names (primary or alternate) diff --git a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh index 52aad5cfa6..2375a648f5 100644 --- a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh +++ b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.sh @@ -2,10 +2,26 @@ # TEST PURPOSE/DESCRIPTION: # ------------------------ # -# This test ensures that the various workflow tasks can be deactivated, -# i.e. removed from the Rocoto XML. Note that we leave the MAKE_GRID_TN, -# MAKE_OROG_TN, and MAKE_SFC_CLIMO_TN activated because there is a -# separate test for turning those off. +# This test has two purposes: +# +# 1) It checks that the various workflow tasks can be deactivated, i.e. +# removed from the Rocoto XML. +# 2) It checks the capability of the workflow to use "template" experiment +# variables, i.e. variables whose definitions include references to +# other variables, e.g. +# +# MY_VAR='\${ANOTHER_VAR}' +# +# Note that we do not deactivate all tasks in the workflow; we leave the +# MAKE_GRID_TN, MAKE_OROG_TN, and MAKE_SFC_CLIMO_TN activated because: +# +# 1) There is already a WE2E test that runs with these three tasks +# deactivated (that test is to ensure that pre-generated grid, +# orography, and surface climatology files can be used). 
+# 2) In checking the template variable capability, we want to make sure +# that the variable definitions file (GLOBAL_VAR_DEFNS_FN) generated +# does not have syntax or other errors in it by sourcing it in these +# three tasks. # RUN_ENVIR="community" @@ -14,13 +30,31 @@ PREEXISTING_DIR_METHOD="rename" PREDEF_GRID_NAME="RRFS_CONUS_25km" CCPP_PHYS_SUITE="FV3_GFS_v15p2" -DATE_FIRST_CYCL="20190615" -DATE_LAST_CYCL="20190615" +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" +USE_USER_STAGED_EXTRN_FILES="TRUE" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" CYCL_HRS=( "00" ) +FCST_LEN_HRS="6" +LBC_SPEC_INTVL_HRS="3" + RUN_TASK_GET_EXTRN_ICS="FALSE" RUN_TASK_GET_EXTRN_LBCS="FALSE" RUN_TASK_MAKE_ICS="FALSE" RUN_TASK_MAKE_LBCS="FALSE" RUN_TASK_RUN_FCST="FALSE" RUN_TASK_RUN_POST="FALSE" +# +# The following shows examples of how to define template variables. Here, +# we define RUN_CMD_UTILS, RUN_CMD_FCST, and RUN_CMD_POST as template +# variables. Note that during this test, these templates aren't actually +# expanded/used (something that would be done using bash's "eval" built-in +# command) anywhere in the scripts. They are included here only to verify +# that the test completes with some variables defined as templates.
+# +RUN_CMD_UTILS='cd $yyyymmdd' +RUN_CMD_FCST='mpirun -np ${PE_MEMBER01}' +RUN_CMD_POST='echo hello $yyyymmdd' diff --git a/tests/WE2E/test_configs/wflow_features/config.template_vars.sh b/tests/WE2E/test_configs/wflow_features/config.template_vars.sh new file mode 120000 index 0000000000..80ede54374 --- /dev/null +++ b/tests/WE2E/test_configs/wflow_features/config.template_vars.sh @@ -0,0 +1 @@ +config.deactivate_tasks.sh \ No newline at end of file diff --git a/ush/bash_utils/check_var_valid_value.sh b/ush/bash_utils/check_var_valid_value.sh index 7a0e20707b..576ad6b1b8 100644 --- a/ush/bash_utils/check_var_valid_value.sh +++ b/ush/bash_utils/check_var_valid_value.sh @@ -108,24 +108,34 @@ The value specified in ${var_name} is not supported: # #----------------------------------------------------------------------- # -# Check whether var_value is equal to one of the elements of the array -# valid_var_values. If not, print out an error message and exit the -# calling script. +# If var_value contains a dollar sign, we assume the corresponding variable +# (var_name) is a template variable, i.e. one whose value contains a +# reference to another variable, e.g. +# +# MY_VAR='\${ANOTHER_VAR}' +# +# In this case, we do nothing since it does not make sense to check +# whether var_value is a valid value (since its contents have not yet +# been expanded). If var_value doesn't contain a dollar sign, it must +# contain a literal string. In this case, we check whether it is equal +# to one of the elements of the array valid_var_values. If not, we +# print out an error message and exit the calling script. 
# #----------------------------------------------------------------------- # - is_element_of "valid_var_values" "${var_value}" || { \ - valid_var_values_str=$(printf "\"%s\" " "${valid_var_values[@]}"); - print_err_msg_exit "\ + if [[ "${var_value}" != *'$'* ]]; then + is_element_of "valid_var_values" "${var_value}" || { \ + valid_var_values_str=$(printf "\"%s\" " "${valid_var_values[@]}"); + print_err_msg_exit "\ ${err_msg} ${var_name} must be set to one of the following: ${valid_var_values_str}"; \ - } + } + fi # #----------------------------------------------------------------------- # -# Restore the shell options saved at the beginning of this script/func- -# tion. +# Restore the shell options saved at the beginning of this script/function. # #----------------------------------------------------------------------- # diff --git a/ush/bash_utils/get_bash_file_contents.sh b/ush/bash_utils/get_bash_file_contents.sh new file mode 100644 index 0000000000..3b3ab7b30d --- /dev/null +++ b/ush/bash_utils/get_bash_file_contents.sh @@ -0,0 +1,71 @@ +# +#----------------------------------------------------------------------- +# +# This file defines a function that returns the contents of a bash script/ +# function with all empty lines, comment lines, and leading and trailing +# whitespace removed. Arguments are as follows: +# +# fp: +# The relative or full path to the file containing the bash script or +# function. +# +# output_varname_contents: +# Name of the output variable that will contain the (processed) contents +# of the file. This is the output of the function. +# +#----------------------------------------------------------------------- +# +function get_bash_file_contents() { + + { save_shell_opts; set -u +x; } > /dev/null 2>&1 + + local valid_args=( \ + "fp" \ + "output_varname_contents" \ + ) + process_args valid_args "$@" + print_input_args "valid_args" + # + # Verify that the required arguments to this function have been specified. 
+ # If not, print out an error message and exit. + # + if [ -z "$fp" ]; then + print_err_msg_exit "\ +The argument \"fp\" specifying the relative or full path to the file to +read was not specified in the call to this function: + fp = \"$fp\"" + fi + + local contents \ + crnt_line + # + # Read in all lines in the file. In doing so: + # + # 1) Concatenate any line ending with the bash line continuation character + # (a backslash) with the following line. + # 2) Remove any leading and trailing whitespace. + # + # Note that these two actions are automatically performed by the "read" + # utility in the while-loop below. + # + contents="" + while read crnt_line; do + contents="${contents}${crnt_line} +" + done < "$fp" + # + # Strip out any comment and empty lines from contents. + # + contents=$( printf "${contents}" | \ + $SED -r -e "/^#.*/d" `# Remove comment lines.` \ + -e "/^$/d" `# Remove empty lines.` \ + ) + # + # Set output variables. + # + printf -v ${output_varname_contents} "${contents}" + + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/bash_utils/print_input_args.sh b/ush/bash_utils/print_input_args.sh index 957cec5243..d5ba5bd53c 100644 --- a/ush/bash_utils/print_input_args.sh +++ b/ush/bash_utils/print_input_args.sh @@ -169,7 +169,7 @@ have been set as follows: #----------------------------------------------------------------------- # # If a global variable named DEBUG is not defined, print out the message. -# If it is defined, print out the message only if DEBUG is set to TRUE. +# If it is defined, print out the message only if DEBUG is set to "TRUE". 
# #----------------------------------------------------------------------- # diff --git a/ush/check_expt_config_vars.sh b/ush/check_expt_config_vars.sh new file mode 100644 index 0000000000..53ce13a094 --- /dev/null +++ b/ush/check_expt_config_vars.sh @@ -0,0 +1,110 @@ +# +#----------------------------------------------------------------------- +# +# This file defines a function that checks that all experiment variables +# set in the user-specified experiment configuration file are defined (by +# being assigned default values) in the default experiment configuration +# file. If a variable is found in the former that is not defined in the +# latter, this function exits with an error message. +# +# This check is performed in order to prevent the user from defining +# arbitrary variables in the user-specified configuration file; the +# latter should be used to specify only variables that have already been +# defined in the default configuration file. +# +# Arguments are as follows: +# +# default_config_fp: +# The relative or full path to the default experiment configuration file. +# +# config_fp: +# The relative or full path to the user-specified experiment configuration +# file. +# +#----------------------------------------------------------------------- +# +function check_expt_config_vars() { + + . ${scrfunc_dir}/source_util_funcs.sh + + { save_shell_opts; set -u +x; } > /dev/null 2>&1 + + local valid_args=( \ + "default_config_fp" \ + "config_fp" \ + ) + process_args valid_args "$@" + print_input_args "valid_args" + + local var_list_default \ + var_list_user \ + crnt_line \ + var_name \ + regex_search + # + # Get the list of variable definitions, first from the default experiment + # configuration file and then from the user-specified experiment + # configuration file.
+ # + get_bash_file_contents fp="${default_config_fp}" \ + output_varname_contents="var_list_default" + + get_bash_file_contents fp="${config_fp}" \ + output_varname_contents="var_list_user" + # + # Loop through each line/variable in var_list_user. For each line, + # extract the name of the variable that is being set (say VAR) and + # check that this variable is set somewhere in the default configuration + # file by verifying that a line that starts with "VAR=" exists in + # var_list_default. + # + while read crnt_line; do + # + # Note that a variable name will be found only if the equal sign immediately + # follows the variable name. + # + var_name=$( printf "%s" "${crnt_line}" | $SED -n -r -e "s/^([^ =\"]*)=.*/\1/p") + + if [ -z "${var_name}" ]; then + + print_info_msg " +The current line (crnt_line) of the user-specified experiment configuration +file (config_fp) does not contain a variable name (i.e. var_name is empty): + config_fp = \"${config_fp}\" + crnt_line = \"${crnt_line}\" + var_name = \"${var_name}\" +Skipping to next line." + + else + # + # Use grep to search for the variable name (followed by an equal sign, + # all at the beginning of a line) in the list of variables in the default + # configuration file. + # + # Note that we use a herestring to input into grep the list of variables + # in the default configuration file. grep will return with a zero status + # if the specified string (regex_search) is found in the default + # variables list and a nonzero status otherwise. Note also that we + # redirect the output of grep to null because we are only interested in + # its exit status.
+ # + regex_search="^${var_name}=" + grep "${regex_search}" <<< "${var_list_default}" > /dev/null 2>&1 || \ + print_err_msg_exit "\ +The variable (var_name) defined on the current line (crnt_line) of the +user-specified experiment configuration file (config_fp) does not appear +in the default experiment configuration file (default_config_fp): + config_fp = \"${config_fp}\" + default_config_fp = \"${default_config_fp}\" + crnt_line = \"${crnt_line}\" + var_name = \"${var_name}\" +Please assign a default value to this variable in the default configuration +file and rerun." + + fi + + done <<< "${var_list_user}" + + { restore_shell_opts; } > /dev/null 2>&1 + +} diff --git a/ush/compare_config_scripts.sh b/ush/compare_config_scripts.sh deleted file mode 100644 index 791fa4e340..0000000000 --- a/ush/compare_config_scripts.sh +++ /dev/null @@ -1,151 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines and then calls a function that checks that all vari- -# ables defined in the user-specified experiment/workflow configuration -# file (whose file name is stored in the variable EXPT_CONFIG_FN) are -# also assigned default values in the default configuration file (whose -# file name is stored in the variable EXPT_DEFAULT_CONFIG_FN). -# -#----------------------------------------------------------------------- -# -function compare_config_scripts() { -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -local scrfunc_fn=$( basename "${scrfunc_fp}" ) -local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# -local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Source bash utility functions. -# -#----------------------------------------------------------------------- -# -. ${scrfunc_dir}/source_util_funcs.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Create a list of variable settings in the default workflow/experiment -# file (script) by stripping out comments, blank lines, extraneous lead- -# ing whitespace, etc from that file and saving the result in the varia- -# ble var_list_default. Each line of var_list_default will have the -# form -# -# VAR=... -# -# where the VAR is a variable name and ... is the value (including any -# trailing comments). Then create an equivalent list for the local con- -# figuration file and save the result in var_list_local. 
-# -#----------------------------------------------------------------------- -# -var_list_default=$( \ -$SED -r \ - -e "s/^([ ]*)([^ ]+.*)/\2/g" \ - -e "/^#.*/d" \ - -e "/^$/d" \ - ${EXPT_DEFAULT_CONFIG_FN} \ -) - -var_list_local=$( \ -$SED -r \ - -e "s/^([ ]*)([^ ]+.*)/\2/g" \ - -e "/^#.*/d" \ - -e "/^$/d" \ - ${EXPT_CONFIG_FN} \ -) -# -#----------------------------------------------------------------------- -# -# Loop through each line of var_list_local. For each line, extract the -# the name of the variable that is being set (say VAR) and check that -# this variable is set somewhere in the default configuration file by -# verifying that a line that starts with "VAR=" exists in var_list_de- -# fault. -# -#----------------------------------------------------------------------- -# -while read crnt_line; do -# -# Note that a variable name will be found only if the equal sign immed- -# iately follows the variable name. -# - var_name=$( printf "%s" "${crnt_line}" | $SED -n -r -e "s/^([^ =\"]*)=.*/\1/p") - - if [ -z "${var_name}" ]; then - - print_info_msg " -Current line (crnt_line) of user-specified experiment/workflow configu- -ration file (EXPT_CONFIG_FN) does not contain a variable name (i.e. -var_name is empty): - EXPT_CONFIG_FN = \"${EXPT_CONFIG_FN}\" - crnt_line = \"${crnt_line}\" - var_name = \"${var_name}\" -Skipping to next line." - - else -# -# Use a herestring to input list of variables in the default configura- -# tion file to grep. Also, redirect the output to null because we are -# only interested in the exit status of grep (which will be nonzero if -# the specified regex was not found in the list).. 
-# - grep "^${var_name}=" <<< "${var_list_default}" > /dev/null 2>&1 || \ - print_err_msg_exit "\ -The variable specified by var_name in the user-specified experiment/ -workflow configuration file (EXPT_CONFIG_FN) does not appear in the de- -fault experiment/workflow configuration file (EXPT_DEFAULT_CONFIG_FN): - EXPT_CONFIG_FN = \"${EXPT_CONFIG_FN}\" - EXPT_DEFAULT_CONFIG_FN = \"${EXPT_DEFAULT_CONFIG_FN}\" - var_name = \"${var_name}\" -Please assign a default value to this variable in the default configura- -tion file and rerun." - - fi - -done <<< "${var_list_local}" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - -} -# -#----------------------------------------------------------------------- -# -# Call the function defined above. -# -#----------------------------------------------------------------------- -# -compare_config_scripts - diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 21d6096a16..afb9428567 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -152,13 +152,14 @@ QUEUE_FCST="" # will be ignored unless WORKFLOW_MANAGER="none". Definitions: # # RUN_CMD_UTILS: -# The run command for pre-processing utilities (shave, orog, sfc_climo_gen, etc.) -# Can be left blank for smaller domains, in which case the executables will run -# without MPI. +# The run command for pre-processing utilities (shave, orog, sfc_climo_gen, +# etc.) Can be left blank for smaller domains, in which case the executables +# will run without MPI. # # RUN_CMD_FCST: -# The run command for the model forecast step. This will be appended to the end -# of the variable definitions file, so it can reference other variables. +# The run command for the model forecast step. 
This will be appended to +# the end of the variable definitions file, so it can reference other +# variables. # # RUN_CMD_POST: # The run command for post-processing (UPP). Can be left blank for smaller @@ -167,7 +168,7 @@ QUEUE_FCST="" #----------------------------------------------------------------------- # RUN_CMD_UTILS="mpirun -np 1" -RUN_CMD_FCST="mpirun -np \${PE_MEMBER01}" +RUN_CMD_FCST='mpirun -np \${PE_MEMBER01}' RUN_CMD_POST="mpirun -np 1" # #----------------------------------------------------------------------- @@ -295,7 +296,7 @@ PTMP="/base/path/of/directory/containing/postprocessed/output/files" # #----------------------------------------------------------------------- # -# Set the sparator character(s) to use in the names of the grid, mosaic, +# Set the separator character(s) to use in the names of the grid, mosaic, # and orography fixed files. # # Ideally, the same separator should be used in the names of these fixed @@ -732,6 +733,7 @@ NOMADS_file_type="nemsio" # directory or the cycle directories under it. 
# #----------------------------------------------------------------------- +# CCPP_PHYS_SUITE="FV3_GFS_v15p2" # #----------------------------------------------------------------------- @@ -1754,9 +1756,9 @@ NUM_ENS_MEMBERS="1" # #----------------------------------------------------------------------- # -DO_SHUM="false" -DO_SPPT="false" -DO_SKEB="false" +DO_SHUM="FALSE" +DO_SPPT="FALSE" +DO_SKEB="FALSE" SHUM_MAG="0.006" #Variable "shum" in input.nml SHUM_LSCALE="150000" SHUM_TSCALE="21600" #Variable "shum_tau" in input.nml @@ -1770,7 +1772,7 @@ SKEB_LSCALE="150000" SKEB_TSCALE="21600" #Variable "skeb_tau" in input.nml SKEB_INT="3600" #Variable "skebint" in input.nml SKEB_VDOF="10" -USE_ZMTNBLCK="false" +USE_ZMTNBLCK="FALSE" # #----------------------------------------------------------------------- # @@ -1787,7 +1789,7 @@ USE_ZMTNBLCK="false" # #----------------------------------------------------------------------- # -DO_SPP="false" +DO_SPP="FALSE" SPP_VAR_LIST=( "pbl" ) SPP_MAG_LIST=( "0.2" ) #Variable "spp_prt_list" in input.nml SPP_LSCALE=( "150000.0" ) @@ -1902,6 +1904,3 @@ OMP_STACKSIZE_RUN_FCST="1024m" KMP_AFFINITY_RUN_POST="scatter" OMP_NUM_THREADS_RUN_POST="1" OMP_STACKSIZE_RUN_POST="1024m" -# -#----------------------------------------------------------------------- -# diff --git a/ush/generate_FV3LAM_wflow.sh b/ush/generate_FV3LAM_wflow.sh index db65baec21..4776c3d455 100755 --- a/ush/generate_FV3LAM_wflow.sh +++ b/ush/generate_FV3LAM_wflow.sh @@ -1107,8 +1107,8 @@ if [[ $retval == 0 ]]; then # else printf " -Experiment/workflow generation failed. Check the log file from the ex- -periment/workflow generation script in the file specified by log_fp: +Experiment generation failed. Check the log file from the experiment +generation script in the file specified by log_fp: log_fp = \"${log_fp}\" Stopping. 
" diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh index 76c964083a..4c881be2db 100755 --- a/ush/launch_FV3LAM_wflow.sh +++ b/ush/launch_FV3LAM_wflow.sh @@ -18,7 +18,13 @@ set -u #----------------------------------------------------------------------- # if [[ $(uname -s) == Darwin ]]; then - command -v greadlink >/dev/null 2>&1 || { echo >&2 "For Darwin-based operating systems (MacOS), the 'greadlink' utility is required to run the UFS SRW Application. Reference the User's Guide for more information about platform requirements. Aborting."; exit 1; } + command -v greadlink >/dev/null 2>&1 || { \ + echo >&2 "\ +For Darwin-based operating systems (MacOS), the 'greadlink' utility is +required to run the UFS SRW Application. Reference the User's Guide for +more information about platform requirements. Aborting."; \ + exit 1; \ + } scrfunc_fp=$( greadlink -f "${BASH_SOURCE[0]}" ) else scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) @@ -28,27 +34,26 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) ushdir="${scrfunc_dir}" . $ushdir/source_util_funcs.sh - # #----------------------------------------------------------------------- # # Get the experiment directory. We assume that there is a symlink to # this script in the experiment directory, and this script is called via -# that symlink. Thus, finding the directory in which the symlink is lo- -# cated will give us the experiment directory. We find this by first +# that symlink. Thus, finding the directory in which the symlink is +# located will give us the experiment directory. We find this by first # obtaining the directory portion (i.e. the portion without the name of # this script) of the command that was used to called this script (i.e. -# "$0") and then use the "readlink -f" command to obtain the correspond- -# ing absolute path. 
This will work for all four of the following ways -# in which the symlink in the experiment directory pointing to this -# script may be called: +# "$0") and then use the "readlink -f" command to obtain the corresponding +# absolute path. This will work for all four of the following ways in +# which the symlink in the experiment directory pointing to this script +# may be called: # # 1) Call this script from the experiment directory: # > cd /path/to/experiment/directory # > launch_FV3LAM_wflow.sh # -# 2) Call this script from the experiment directory but using "./" be- -# fore the script name: +# 2) Call this script from the experiment directory but using "./" before +# the script name: # > cd /path/to/experiment/directory # > ./launch_FV3LAM_wflow.sh # @@ -56,9 +61,8 @@ ushdir="${scrfunc_dir}" # symlink in the experiment directory: # > /path/to/experiment/directory/launch_FV3LAM_wflow.sh # -# 4) Call this script from a directory that is several levels up from -# the experiment directory (but not necessarily at the root directo- -# ry): +# 4) Call this script from a directory that is several levels up from the +# experiment directory (but not necessarily at the root directory): # > cd /path/to # > experiment/directory/launch_FV3LAM_wflow.sh # @@ -75,7 +79,13 @@ ushdir="${scrfunc_dir}" # exptdir=$( dirname "$0" ) if [[ $(uname -s) == Darwin ]]; then - command -v greadlink >/dev/null 2>&1 || { echo >&2 "For Darwin-based operating systems (MacOS), the 'greadlink' utility is required to run the UFS SRW Application. Reference the User's Guide for more information about platform requirements. Aborting."; exit 1; } + command -v greadlink >/dev/null 2>&1 || { \ + echo >&2 "\ +For Darwin-based operating systems (MacOS), the 'greadlink' utility is +required to run the UFS SRW Application. Reference the User's Guide for +more information about platform requirements. 
Aborting."; + exit 1; + } exptdir=$( greadlink -f "$exptdir" ) else exptdir=$( readlink -f "$exptdir" ) @@ -106,14 +116,12 @@ expt_name="${EXPT_SUBDIR}" # #----------------------------------------------------------------------- # -machine=$(echo_lowercase $MACHINE) -env_fn=${WFLOW_ENV_FN:-"wflow_${machine}.env"} -env_fp="${SR_WX_APP_TOP_DIR}/env/${env_fn}" +env_fp="${SR_WX_APP_TOP_DIR}/env/${WFLOW_ENV_FN}" module purge source "${env_fp}" || print_err_msg_exit "\ - Sourcing platform-specific environment file (env_fp) for -the workflow task failed : -env_fp = \"${env_fp}\"" +Sourcing platform-specific environment file (env_fp) for the workflow +task failed: + env_fp = \"${env_fp}\"" # #----------------------------------------------------------------------- # @@ -141,20 +149,19 @@ wflow_status="IN PROGRESS" # #----------------------------------------------------------------------- # -cd "$exptdir" +cd_vrfy "$exptdir" # #----------------------------------------------------------------------- # -# Issue the rocotorun command to (re)launch the next task in the -# workflow. Then check for error messages in the output of rocotorun. -# If any are found, it means the end-to-end run of the workflow failed. -# In this case, we remove the crontab entry that launches the workflow, -# and we append an appropriate failure message at the end of the launch -# log file. +# Issue the rocotorun command to (re)launch the next task in the workflow. +# Then check for error messages in the output of rocotorun. If any are +# found, it means the end-to-end run of the workflow failed. In this +# case, we remove the crontab entry that launches the workflow, and we +# append an appropriate failure message at the end of the launch log +# file. 
# #----------------------------------------------------------------------- # - tmp_fn="rocotorun_output.txt" rocotorun_cmd="rocotorun -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" eval ${rocotorun_cmd} > ${tmp_fn} 2>&1 @@ -239,15 +246,15 @@ ${rocotostat_output} # ... # # Thus, the first row is a header line containing the column titles, and -# the remaining rows each correspond to one cycle in the workflow. Be- -# low, we are interested in the first and second columns of each row. -# The first column is a string containing the start time of the cycle -# (in the format YYYYMMDDHHmm, where YYYY is the 4-digit year, MM is the -# 2-digit month, DD is the 2-digit day of the month, HH is the 2-digit -# hour of the day, and mm is the 2-digit minute of the hour). The se- -# cond column is a string containing the state of the cycle. This can -# be "Active" or "Done". Below, we read in and store these two columns -# in (1-D) arrays. +# the remaining rows each correspond to one cycle in the workflow. Below, +# we are interested in the first and second columns of each row. The +# first column is a string containing the start time of the cycle (in the +# format YYYYMMDDHHmm, where YYYY is the 4-digit year, MM is the 2-digit +# month, DD is the 2-digit day of the month, HH is the 2-digit hour of +# the day, and mm is the 2-digit minute of the hour). The second column +# is a string containing the state of the cycle. This can be "Active" +# or "Done". Below, we read in and store these two columns in (1-D) +# arrays. # #----------------------------------------------------------------------- # @@ -259,9 +266,9 @@ cycle_status=() i=0 while read -r line; do # -# Note that the first line in rocotostat_output is a header line con- -# taining the column titles. Thus, we ignore it and consider only the -# remaining lines (of which there is one per cycle). +# Note that the first line in rocotostat_output is a header line containing +# the column titles. 
Thus, we ignore it and consider only the remaining +# lines (of which there is one per cycle). # if [ $i -gt 0 ]; then im1=$((i-1)) @@ -289,9 +296,9 @@ done # #----------------------------------------------------------------------- # -# If the number of completed cycles is equal to the total number of cy- -# cles, it means the end-to-end run of the workflow was successful. In -# this case, we reset the wflow_status to "SUCCESS". +# If the number of completed cycles is equal to the total number of cycles, +# it means the end-to-end run of the workflow was successful. In this +# case, we reset the wflow_status to "SUCCESS". # #----------------------------------------------------------------------- # @@ -301,8 +308,8 @@ fi # #----------------------------------------------------------------------- # -# Print informational messages about the workflow to the launch log -# file, including the workflow status. +# Print informational messages about the workflow to the launch log file, +# including the workflow status. # #----------------------------------------------------------------------- # @@ -333,25 +340,24 @@ if [ "${wflow_status}" = "SUCCESS" ] || \ msg=" The end-to-end run of the workflow for the forecast experiment specified -by expt_name has completed with the following workflow status (wflow_- -status): +by expt_name has completed with the following workflow status (wflow_status): expt_name = \"${expt_name}\" wflow_status = \"${wflow_status}\" " # # If a cron job was being used to periodically relaunch the workflow, we -# now remove the entry in the crontab corresponding to the workflow be- -# cause the end-to-end run of the workflow has now either succeeded or +# now remove the entry in the crontab corresponding to the workflow +# because the end-to-end run of the workflow has now either succeeded or # failed and will remain in that state without manual user intervention. -# Thus, there is no need to try to relaunch it. 
We also append a mes- -# sage to the completion message above to indicate this. +# Thus, there is no need to try to relaunch it. We also append a message +# to the completion message above to indicate this. # if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then msg="${msg}\ -Thus, there is no need to relaunch the workflow via a cron job. Remo- -ving from the crontab the line (CRONTAB_LINE) that calls the workflow -launch script for this experiment: +Thus, there is no need to relaunch the workflow via a cron job. Removing +from the crontab the line (CRONTAB_LINE) that calls the workflow launch +script for this experiment: CRONTAB_LINE = \"${CRONTAB_LINE}\" " # @@ -364,12 +370,13 @@ launch script for this experiment: $SED -r -e "s%[*]%\\\\*%g" ) # # In the string passed to the grep command below, we use the line start -# and line end anchors ("^" and "$", respectively) to ensure that we on- -# ly find lines in the crontab that contain exactly the string in cron- -# tab_line_esc_astr without any leading or trailing characters. +# and line end anchors ("^" and "$", respectively) to ensure that we +# only find lines in the crontab that contain exactly the string in +# crontab_line_esc_astr without any leading or trailing characters. 
# if [ "$MACHINE" = "WCOSS_DELL_P3" ];then - grep -v "^${crontab_line_esc_astr}$" "/u/$USER/cron/mycrontab" > tmpfile && mv tmpfile "/u/$USER/cron/mycrontab" + grep -v "^${crontab_line_esc_astr}$" "/u/$USER/cron/mycrontab" \ + > tmpfile && mv_vrfy tmpfile "/u/$USER/cron/mycrontab" else ( crontab -l | grep -v "^${crontab_line_esc_astr}$" ) | crontab - fi @@ -388,7 +395,3 @@ launch script for this experiment: fi fi - - - - diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 1f0d881260..17b6d5d3d4 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -135,8 +135,7 @@ jjob_fp="$2" module purge machine=$(echo_lowercase $MACHINE) -env_fn=${BUILD_ENV_FN:-"build_${machine}_${COMPILER}.env"} -env_fp="${SR_WX_APP_TOP_DIR}/env/${env_fn}" +env_fp="${SR_WX_APP_TOP_DIR}/env/${BUILD_ENV_FN}" source "${env_fp}" || print_err_msg_exit "\ Sourcing platform- and compiler-specific environment file (env_fp) for the workflow task specified by task_name failed: diff --git a/ush/setup.sh b/ush/setup.sh index 88dcab21e5..d8db064c69 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -63,6 +63,7 @@ Starting function ${func_name}() in \"${scrfunc_fn}\"... # #----------------------------------------------------------------------- # +. ./check_expt_config_vars.sh . ./set_cycle_dates.sh . ./set_gridparams_GFDLgrid.sh . ./set_gridparams_ESGgrid.sh @@ -111,7 +112,9 @@ if [ -f "${EXPT_CONFIG_FN}" ]; then # configuration file are also assigned default values in the default # configuration file. # - . ./compare_config_scripts.sh + check_expt_config_vars \ + default_config_fp="./${EXPT_DEFAULT_CONFIG_FN}" \ + config_fp="./${EXPT_CONFIG_FN}" # # Now source the user-specified configuration file. 
# @@ -698,12 +701,27 @@ fi # #----------------------------------------------------------------------- # -# Calculate PPN_RUN_FCST from NCORES_PER_NODE and OMP_NUM_THREADS_RUN_FCST +# Set the names of the build and workflow environment files (if not +# already specified by the user). These are the files that need to be +# sourced before building the component SRW App codes and running various +# workflow scripts, respectively. # #----------------------------------------------------------------------- # -PPN_RUN_FCST_OPT="$(( ${NCORES_PER_NODE} / ${OMP_NUM_THREADS_RUN_FCST} ))" -PPN_RUN_FCST=${PPN_RUN_FCST:-${PPN_RUN_FCST_OPT}} +machine=$(echo_lowercase ${MACHINE}) +WFLOW_ENV_FN=${WFLOW_ENV_FN:-"wflow_${machine}.env"} +BUILD_ENV_FN=${BUILD_ENV_FN:-"build_${machine}_${COMPILER}.env"} +# +#----------------------------------------------------------------------- +# +# Calculate a default value for the number of processes per node for the +# RUN_FCST_TN task. Then set PPN_RUN_FCST to this default value if +# PPN_RUN_FCST is not already specified by the user. 
+# +#----------------------------------------------------------------------- +# +ppn_run_fcst_default="$(( ${NCORES_PER_NODE} / ${OMP_NUM_THREADS_RUN_FCST} ))" +PPN_RUN_FCST=${PPN_RUN_FCST:-${ppn_run_fcst_default}} # #----------------------------------------------------------------------- # @@ -882,32 +900,32 @@ fi # #----------------------------------------------------------------------- # -CYCL_HRS_str=$(printf "\"%s\" " "${CYCL_HRS[@]}") -CYCL_HRS_str="( $CYCL_HRS_str)" +cycl_hrs_str=$(printf "\"%s\" " "${CYCL_HRS[@]}") +cycl_hrs_str="( ${cycl_hrs_str})" i=0 -for CYCL in "${CYCL_HRS[@]}"; do +for cycl_hr in "${CYCL_HRS[@]}"; do - CYCL_OR_NULL=$( printf "%s" "$CYCL" | $SED -n -r -e "s/^([0-9]{2})$/\1/p" ) + cycl_hr_or_null=$( printf "%s" "${cycl_hr}" | $SED -n -r -e "s/^([0-9]{2})$/\1/p" ) - if [ -z "${CYCL_OR_NULL}" ]; then + if [ -z "${cycl_hr_or_null}" ]; then print_err_msg_exit "\ Each element of CYCL_HRS must be a string consisting of exactly 2 digits -(including a leading \"0\", if necessary) specifying an hour-of-day. Ele- -ment #$i of CYCL_HRS (where the index of the first element is 0) does not -have this form: - CYCL_HRS = $CYCL_HRS_str +(including a leading \"0\", if necessary) specifying an hour-of-day. +Element #$i of CYCL_HRS (where the index of the first element is 0) does +not have this form: + CYCL_HRS = ${cycl_hrs_str} CYCL_HRS[$i] = \"${CYCL_HRS[$i]}\"" fi - if [ "${CYCL_OR_NULL}" -lt "0" ] || \ - [ "${CYCL_OR_NULL}" -gt "23" ]; then + if [ "${cycl_hr_or_null}" -lt "0" ] || \ + [ "${cycl_hr_or_null}" -gt "23" ]; then print_err_msg_exit "\ -Each element of CYCL_HRS must be an integer between \"00\" and \"23\", in- -clusive (including a leading \"0\", if necessary), specifying an hour-of- -day. 
Element #$i of CYCL_HRS (where the index of the first element is 0) -does not have this form: - CYCL_HRS = $CYCL_HRS_str +Each element of CYCL_HRS must be an integer between \"00\" and \"23\", +inclusive (including a leading \"0\", if necessary), specifying an hour- +of-day. Element #$i of CYCL_HRS (where the index of the first element +is 0) does not have this form: + CYCL_HRS = ${cycl_hrs_str} CYCL_HRS[$i] = \"${CYCL_HRS[$i]}\"" fi @@ -929,7 +947,7 @@ if [ "${INCR_CYCL_FREQ}" -lt "24" ] && [ "$i" -gt "1" ]; then The number of CYCL_HRS does not match with that expected by INCR_CYCL_FREQ: INCR_CYCL_FREQ = ${INCR_CYCL_FREQ} cycle interval by the number of CYCL_HRS = ${cycl_intv} - CYCL_HRS = $CYCL_HRS_str " + CYCL_HRS = ${cycl_hrs_str}" fi im1=$(( $i-1 )) @@ -941,7 +959,7 @@ The number of CYCL_HRS does not match with that expected by INCR_CYCL_FREQ: print_err_msg_exit "\ Element #${itmp} of CYCL_HRS does not match with the increment of cycle frequency INCR_CYCL_FREQ: - CYCL_HRS = $CYCL_HRS_str + CYCL_HRS = ${cycl_hrs_str} INCR_CYCL_FREQ = ${INCR_CYCL_FREQ} CYCL_HRS[$itmp] = \"${CYCL_HRS[$itmp]}\"" fi @@ -961,7 +979,7 @@ fi set_cycle_dates \ date_start="${DATE_FIRST_CYCL}" \ date_end="${DATE_LAST_CYCL}" \ - cycle_hrs="${CYCL_HRS_str}" \ + cycle_hrs="${cycl_hrs_str}" \ incr_cycl_freq="${INCR_CYCL_FREQ}" \ output_varname_all_cdates="ALL_CDATES" @@ -1000,8 +1018,7 @@ fi # Directory containing various executable files. # # TEMPLATE_DIR: -# Directory in which templates of various FV3-LAM input files are locat- -# ed. +# Directory in which templates of various FV3-LAM input files are located. # # UFS_WTHR_MDL_DIR: # Directory in which the (NEMS-enabled) FV3-LAM application is located. 
@@ -1118,7 +1135,7 @@ One or more fix file directories have not been specified for this machine: TOPO_DIR = \"${TOPO_DIR:-\"\"} SFC_CLIMO_INPUT_DIR = \"${SFC_CLIMO_INPUT_DIR:-\"\"} FIXLAM_NCO_BASEDIR = \"${FIXLAM_NCO_BASEDIR:-\"\"} -You can specify the missing location(s) in config.sh" +You can specify the missing location(s) in ${EXPT_CONFIG_FN}." fi ;; @@ -1208,7 +1225,7 @@ check_var_valid_value \ # Set USE_CUSTOM_POST_CONFIG_FILE to either "TRUE" or "FALSE" so we don't # have to consider other valid values later on. # -USE_CUSTOM_POST_CONFIG_FILE=$(echo_uppercase $USE_CUSTOM_POST_CONFIG_FILE) +USE_CUSTOM_POST_CONFIG_FILE=$(echo_uppercase ${USE_CUSTOM_POST_CONFIG_FILE}) if [ "$USE_CUSTOM_POST_CONFIG_FILE" = "TRUE" ] || \ [ "$USE_CUSTOM_POST_CONFIG_FILE" = "YES" ]; then USE_CUSTOM_POST_CONFIG_FILE="TRUE" @@ -1441,7 +1458,7 @@ must set DT_SUBHOURLY_POST_MNTS to something other than zero." # # For now, the sub-hourly capability is restricted to having values of # DT_SUBHOURLY_POST_MNTS that evenly divide into 60 minutes. This is -# because the jinja rocoto XML template (FV3LAM_wflow.xml) assumes that +# because the jinja rocoto XML template (${WFLOW_XML_FN}) assumes that # model output is generated at the top of every hour (i.e. at 00 minutes). 
# This restricts DT_SUBHOURLY_POST_MNTS to the following values (inluding # both cases with and without a leading 0): @@ -1574,8 +1591,8 @@ fi # #----------------------------------------------------------------------- # -# The FV3 forecast model needs the following input files in the run di- -# rectory to start a forecast: +# The FV3 forecast model needs the following input files in the run +# directory to start a forecast: # # (1) The data table file # (2) The diagnostics table file @@ -1583,27 +1600,22 @@ fi # (4) The FV3 namelist file # (5) The model configuration file # (6) The NEMS configuration file -# -# If using CCPP, it also needs: -# # (7) The CCPP physics suite definition file # # The workflow contains templates for the first six of these files. # Template files are versions of these files that contain placeholder -# (i.e. dummy) values for various parameters. The experiment/workflow -# generation scripts copy these templates to appropriate locations in -# the experiment directory (either the top of the experiment directory -# or one of the cycle subdirectories) and replace the placeholders in -# these copies by actual values specified in the experiment/workflow -# configuration file (or derived from such values). The scripts then -# use the resulting "actual" files as inputs to the forecast model. -# -# Note that the CCPP physics suite defintion file does not have a cor- -# responding template file because it does not contain any values that -# need to be replaced according to the experiment/workflow configura- -# tion. If using CCPP, this file simply needs to be copied over from -# its location in the forecast model's directory structure to the ex- -# periment directory. +# (i.e. dummy) values for various parameters. The experiment generation +# and/or the forecast task (i.e. J-job) scripts copy these templates to +# appropriate locations in the experiment directory (e.g. 
to the top of +# the experiment directory, to one of the cycle subdirectories, etc) and +# replace the placeholders with actual values to obtain the files that +# are used as inputs to the forecast model. +# +# Note that the CCPP physics suite defintion file (SDF) does not have a +# corresponding template file because it does not contain any values +# that need to be replaced according to the experiment configuration. +# This file simply needs to be copied over from its location in the +# forecast model's directory structure to the experiment directory. # # Below, we first set the names of the templates for the first six files # listed above. We then set the full paths to these template files. @@ -2057,7 +2069,6 @@ Reset value is:" print_info_msg "$msg" fi - # #----------------------------------------------------------------------- # @@ -2181,7 +2192,6 @@ fi #----------------------------------------------------------------------- # . ./set_extrn_mdl_params.sh - # #----------------------------------------------------------------------- # @@ -2536,7 +2546,6 @@ fi #----------------------------------------------------------------------- # NNODES_RUN_FCST=$(( (PE_MEMBER01 + PPN_RUN_FCST - 1)/PPN_RUN_FCST )) - # #----------------------------------------------------------------------- # @@ -2584,179 +2593,106 @@ set_thompson_mp_fix_files \ # #----------------------------------------------------------------------- # -# Generate the shell script that will appear in the experiment directory -# (EXPTDIR) and will contain definitions of variables needed by the va- -# rious scripts in the workflow. We refer to this as the experiment/ -# workflow global variable definitions file. We will create this file -# by: -# -# 1) Copying the default workflow/experiment configuration file (speci- -# fied by EXPT_DEFAULT_CONFIG_FN and located in the shell script di- -# rectory specified by USHDIR) to the experiment directory and rena- -# ming it to the name specified by GLOBAL_VAR_DEFNS_FN. 
-# -# 2) Resetting the default variable values in this file to their current -# values. This is necessary because these variables may have been -# reset by the user-specified configuration file (if one exists in -# USHDIR) and/or by this setup script, e.g. because predef_domain is -# set to a valid non-empty value. -# -# 3) Appending to the variable definitions file any new variables intro- -# duced in this setup script that may be needed by the scripts that -# perform the various tasks in the workflow (and which source the va- -# riable defintions file). -# -# First, set the full path to the variable definitions file and copy the -# default configuration script into it. +# Set the full path to the experiment's variable definitions file. This +# file will contain definitions of variables (in bash syntax) needed by +# the various scripts in the workflow. # #----------------------------------------------------------------------- # -GLOBAL_VAR_DEFNS_FP="$EXPTDIR/$GLOBAL_VAR_DEFNS_FN" -cp_vrfy $USHDIR/${EXPT_DEFAULT_CONFIG_FN} ${GLOBAL_VAR_DEFNS_FP} +GLOBAL_VAR_DEFNS_FP="$EXPTDIR/${GLOBAL_VAR_DEFNS_FN}" # #----------------------------------------------------------------------- # -# +# Get the list of primary experiment variables and their default values +# from the default experiment configuration file (EXPT_DEFAULT_CONFIG_FN). +# By "primary", we mean those variables that are defined in the default +# configuration file and can be reset in the user-specified experiment +# configuration file (EXPT_CONFIG_FN). The default values will be updated +# below to user-specified ones and the result saved in the experiment's +# variable definitions file. # #----------------------------------------------------------------------- # +print_info_msg " +Creating list of default experiment variable definitions..." -# Read all lines of GLOBAL_VAR_DEFNS file into the variable line_list. 
-line_list=$( $SED -r -e "s/(.*)/\1/g" ${GLOBAL_VAR_DEFNS_FP} ) -# -# Loop through the lines in line_list and concatenate lines ending with -# the line bash continuation character "\". -# -rm_vrfy ${GLOBAL_VAR_DEFNS_FP} -while read crnt_line; do - printf "%s\n" "${crnt_line}" >> ${GLOBAL_VAR_DEFNS_FP} -done <<< "${line_list}" -# -#----------------------------------------------------------------------- -# -# The following comment block needs to be updated because now line_list -# may contain lines that are not assignment statements (e.g. it may con- -# tain if-statements). Such lines are ignored in the while-loop below. -# -# Reset each of the variables in the variable definitions file to its -# value in the current environment. To accomplish this, we: -# -# 1) Create a list of variable settings by stripping out comments, blank -# lines, extraneous leading whitespace, etc from the variable defini- -# tions file (which is currently identical to the default workflow/ -# experiment configuration script) and saving the result in the vari- -# able line_list. Each line of line_list will have the form -# -# VAR=... -# -# where the VAR is a variable name and ... is the value from the de- -# fault configuration script (which does not necessarily correspond -# to the current value of the variable). -# -# 2) Loop through each line of line_list. For each line, we extract the -# variable name (and save it in the variable var_name), get its value -# from the current environment (using bash indirection, i.e. -# ${!var_name}), and use the set_file_param() function to replace the -# value of the variable in the variable definitions script (denoted -# above by ...) with its current value. -# -#----------------------------------------------------------------------- -# -# Also should remove trailing whitespace... 
-line_list=$( $SED -r \ - -e "s/^([ ]*)([^ ]+.*)/\2/g" \ - -e "/^#.*/d" \ - -e "/^$/d" \ - ${GLOBAL_VAR_DEFNS_FP} ) +get_bash_file_contents fp="$USHDIR/${EXPT_DEFAULT_CONFIG_FN}" \ + output_varname_contents="default_var_defns" print_info_msg "$DEBUG" " -Before updating default values of experiment variables to user-specified -values, the variable \"line_list\" contains: +The variable \"default_var_defns\" containing default values of primary +experiment variables is set as follows: -${line_list} +${default_var_defns} " # #----------------------------------------------------------------------- # -# Add a comment at the beginning of the variable definitions file that -# indicates that the first section of that file is (mostly) the same as -# the configuration file. -# -#----------------------------------------------------------------------- -# -read -r -d '' str_to_insert << EOM +# Create a list of primary experiment variable definitions containing +# updated values. By "updated", we mean non-default values. Values +# may have been updated due to the presence of user-specified values in +# the experiment configuration file (EXPT_CONFIG_FN) or due to other +# considerations (e.g. resetting depending on the platform the App is +# running on). # #----------------------------------------------------------------------- -#----------------------------------------------------------------------- -# Section 1: -# This section is a copy of the default experiment configuration file -# (${EXPT_DEFAULT_CONFIG_FN}) in the shell scripts directory specified by USHDIR -# except that variable values have been updated to those for the experiment -# (as opposed to the default values). -#----------------------------------------------------------------------- -#----------------------------------------------------------------------- -# -EOM # -# Replace all occurrences of actual newlines in the variable str_to_insert -# with escaped backslash-n. 
This is needed for the sed command below to -# work properly (i.e. to avoid it failing with an "unterminated `s' command" -# error message). -# -str_to_insert=${str_to_insert//$'\n'/\\n} +print_info_msg " +Creating lists of (updated) experiment variable definitions..." # -# Insert str_to_insert into GLOBAL_VAR_DEFNS_FP right after the line -# containing the name of the interpreter (i.e. the line that starts with -# the string "#!", e.g. "#!/bin/bash"). +# Set the flag that specifies whether or not array variables will be +# recorded in the variable definitions file on one line or one element +# per line. Then, if writing arrays one element per line (i.e. multiline), +# set an escaped-newline character that needs to be included after every +# element of each array as the newline character in order for sed to +# write the line properly. # -regexp="(^#!.*)" -$SED -i -r -e "s|$regexp|\1\n\n${str_to_insert}\n|g" ${GLOBAL_VAR_DEFNS_FP} +multiline_arrays="TRUE" +#multiline_arrays="FALSE" +escbksl_nl_or_null="" +if [ "${multiline_arrays}" = "TRUE" ]; then + escbksl_nl_or_null='\\\n' +fi # -# Loop through the lines in line_list. +# Loop through the lines in default_var_defns. Reset the value of the +# variable on each line to the updated value (e.g. to a user-specified +# value, as opposed to the default value). The updated list of variables +# and values will be saved in var_defns. # -print_info_msg " -Generating the global experiment variable definitions file specified by -GLOBAL_VAR_DEFNS_FN: - GLOBAL_VAR_DEFNS_FN = \"${GLOBAL_VAR_DEFNS_FN}\" -Full path to this file is: - GLOBAL_VAR_DEFNS_FP = \"${GLOBAL_VAR_DEFNS_FP}\" -For more detailed information, set DEBUG to \"TRUE\" in the experiment -configuration file (\"${EXPT_CONFIG_FN}\")." - -template_var_names=() -template_var_values=() +var_defns="" while read crnt_line; do # # Try to obtain the name of the variable being set on the current line. 
-# This will be successful only if the line consists of one or more char- -# acters representing the name of a variable (recall that in generating -# the variable line_list, leading spaces on each line were stripped out), -# followed by an equal sign, followed by zero or more characters -# representing the value that the variable is being set to. +# This will be successful only if the line consists of one or more non- +# whitespace characters representing the name of a variable followed by +# an equal sign, followed by zero or more characters representing the +# value that the variable is being set to. (Recall that in generating +# the variable default_var_defns, leading spaces on each line were +# stripped out). # var_name=$( printf "%s" "${crnt_line}" | $SED -n -r -e "s/^([^ ]*)=.*/\1/p" ) # # If var_name is not empty, then a variable name was found on the current -# line in line_list. +# line in default_var_defns. # - if [ ! -z $var_name ]; then + if [ ! -z ${var_name} ]; then print_info_msg "$DEBUG" " var_name = \"${var_name}\"" # # If the variable specified in var_name is set in the current environment -# (to either an empty or non-empty string), get its value and insert it -# in the variable definitions file on the line where that variable is -# defined. Note that +# (to either an empty or non-empty string), get its value and save it in +# var_value. Note that # # ${!var_name+x} # # will retrun the string "x" if the variable specified in var_name is # set (to either an empty or non-empty string), and it will return an -# empty string if the variable specified in var_name is unset (i.e. un- -# defined). +# empty string if the variable specified in var_name is unset (i.e. if +# it is undefined). # - if [ ! -z ${!var_name+x} ]; then + unset "var_value" + if [ ! -z "${!var_name+x}" ]; then # # The variable may be a scalar or an array. Thus, we first treat it as # an array and obtain the number of elements that it contains. 
@@ -2765,49 +2701,40 @@ var_name = \"${var_name}\"" array=("${!array_name_at}") num_elems="${#array[@]}" # -# We will now set the variable var_value to the string that needs to be -# placed on the right-hand side of the assignment operator (=) on the -# appropriate line in the variable definitions file. How this is done -# depends on whether the variable is a scalar or an array. +# Set var_value to the updated value of the current experiment variable. +# How this is done depends on whether the variable is a scalar or an +# array. # # If the variable contains only one element, then it is a scalar. (It # could be a 1-element array, but for simplicity, we treat that case as # a scalar.) In this case, we enclose its value in double quotes and # save the result in var_value. # - if [ "$num_elems" -eq 1 ]; then + if [ "${num_elems}" -eq 1 ]; then + var_value="${!var_name}" - var_value="\"${var_value}\"" + rhs="'${var_value}'" # # If the variable contains more than one element, then it is an array. # In this case, we build var_value in two steps as follows: # # 1) Generate a string containing each element of the array in double -# quotes and followed by a space. +# quotes and followed by a space (and followed by an optional backslash +# and newline if multiline_arrays has been set to "TRUE"). # # 2) Place parentheses around the double-quoted list of array elements # generated in the first step. Note that there is no need to put a -# space before the closing parenthesis because in step 1, we have -# already placed a space after the last element. +# space before the closing parenthesis because during step 1 above, +# a space has already been placed after the last array element. 
# else - arrays_on_one_line="TRUE" - arrays_on_one_line="FALSE" - - if [ "${arrays_on_one_line}" = "TRUE" ]; then - var_value=$(printf "\"%s\" " "${!array_name_at}") -# var_value=$(printf "\"%s\" \\\\\\ \\\n" "${!array_name_at}") - else -# var_value=$(printf "%s" "\\\\\\n") - var_value="\\\\\n" - for (( i=0; i<${num_elems}; i++ )); do -# var_value=$(printf "%s\"%s\" %s" "${var_value}" "${array[$i]}" "\\\\\\n") - var_value="${var_value}\"${array[$i]}\" \\\\\n" -# var_value="${var_value}\"${array[$i]}\" " - done - fi - var_value="( $var_value)" + var_value="" + printf -v "var_value" "${escbksl_nl_or_null}" + for (( i=0; i<${num_elems}; i++ )); do + printf -v "var_value" "${var_value}\"${array[$i]}\" ${escbksl_nl_or_null}" + done + rhs="( ${var_value})" fi # @@ -2824,52 +2751,83 @@ The variable specified by \"var_name\" is not set in the current environment: var_name = \"${var_name}\" Setting its value in the variable definitions file to an empty string." - var_value="\"\"" + rhs="''" fi # -# Now place var_value on the right-hand side of the assignment statement -# on the appropriate line in the variable definitions file. +# Set the line containing the variable's definition. Then add the line +# to the list of all variable definitions. # - set_file_param "${GLOBAL_VAR_DEFNS_FP}" "${var_name}" "${var_value}" + var_defn="${var_name}=$rhs" + printf -v "var_defns" "${var_defns}${var_defn}\n" # # If var_name is empty, then a variable name was not found on the current -# line in line_list. In this case, print out a warning and move on to -# the next line. +# line in default_var_defns. In this case, print out a warning and move +# on to the next line. 
# else print_info_msg " -Could not extract a variable name from the current line in \"line_list\" +Could not extract a variable name from the current line in \"default_var_defns\" (probably because it does not contain an equal sign with no spaces on either side): crnt_line = \"${crnt_line}\" var_name = \"${var_name}\" -Continuing to next line in \"line_list\"." +Continuing to next line in \"default_var_defns\"." fi -done <<< "${line_list}" +done <<< "${default_var_defns}" # #----------------------------------------------------------------------- # -# Append additional variable definitions (and comments) to the variable -# definitions file. These variables have been set above using the vari- -# ables in the default and local configuration scripts. These variables -# are needed by various tasks/scripts in the workflow. +# Construct the experiment's variable definitions file. Below, we first +# record the contents we want to place in this file in the variable +# var_defns_file_contents, and we then write the contents of this +# variable to the file. # #----------------------------------------------------------------------- # -{ cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} +print_info_msg " +Generating the global experiment variable definitions file specified by +GLOBAL_VAR_DEFNS_FN: + GLOBAL_VAR_DEFNS_FN = \"${GLOBAL_VAR_DEFNS_FN}\" +Full path to this file is: + GLOBAL_VAR_DEFNS_FP = \"${GLOBAL_VAR_DEFNS_FP}\" +For more detailed information, set DEBUG to \"TRUE\" in the experiment +configuration file (\"${EXPT_CONFIG_FN}\")." +var_defns_file_contents="\ +# +#----------------------------------------------------------------------- +#----------------------------------------------------------------------- +# Section 1: +# This section contains (most of) the primary experiment variables, i.e. 
+# those variables that are defined in the default configuration file +# (${EXPT_DEFAULT_CONFIG_FN}) and that can be reset via the user-specified +# experiment configuration file (${EXPT_CONFIG_FN}). +#----------------------------------------------------------------------- +#----------------------------------------------------------------------- +# +${var_defns}" +# +# Append derived/secondary variable definitions (as well as comments) to +# the contents of the variable definitions file. +# +ensmem_names_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${ENSMEM_NAMES[@]}") +ensmem_names_str=$(printf "( %s${escbksl_nl_or_null})" "${ensmem_names_str}") + +fv3_nml_ensmem_fps_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${FV3_NML_ENSMEM_FPS[@]}") +fv3_nml_ensmem_fps_str=$(printf "( %s${escbksl_nl_or_null})" "${fv3_nml_ensmem_fps_str}") + +var_defns_file_contents=${var_defns_file_contents}"\ # #----------------------------------------------------------------------- #----------------------------------------------------------------------- # Section 2: -# This section defines variables that have been derived from the ones -# above by the setup script (setup.sh) and which are needed by one or -# more of the scripts that perform the workflow tasks (those scripts -# source this variable definitions file). +# This section defines variables that have been derived from the primary +# set of experiment variables above (we refer to these as \"derived\" or +# \"secondary\" variables). #----------------------------------------------------------------------- #----------------------------------------------------------------------- # @@ -2877,15 +2835,15 @@ done <<< "${line_list}" # #----------------------------------------------------------------------- # -# Full path to workflow launcher script, its log file, and the line that -# gets added to the cron table to launch this script if USE_CRON_TO_RELAUNCH -# is set to TRUE. 
+# Full path to workflow (re)launch script, its log file, and the line +# that gets added to the cron table to launch this script if the flag +# USE_CRON_TO_RELAUNCH is set to \"TRUE\". # #----------------------------------------------------------------------- # -WFLOW_LAUNCH_SCRIPT_FP="${WFLOW_LAUNCH_SCRIPT_FP}" -WFLOW_LAUNCH_LOG_FP="${WFLOW_LAUNCH_LOG_FP}" -CRONTAB_LINE="${CRONTAB_LINE}" +WFLOW_LAUNCH_SCRIPT_FP='${WFLOW_LAUNCH_SCRIPT_FP}' +WFLOW_LAUNCH_LOG_FP='${WFLOW_LAUNCH_LOG_FP}' +CRONTAB_LINE='${CRONTAB_LINE}' # #----------------------------------------------------------------------- # @@ -2893,44 +2851,44 @@ CRONTAB_LINE="${CRONTAB_LINE}" # #----------------------------------------------------------------------- # -SR_WX_APP_TOP_DIR="${SR_WX_APP_TOP_DIR}" -HOMErrfs="$HOMErrfs" -USHDIR="$USHDIR" -SCRIPTSDIR="$SCRIPTSDIR" -JOBSDIR="$JOBSDIR" -SORCDIR="$SORCDIR" -SRC_DIR="$SRC_DIR" -PARMDIR="$PARMDIR" -MODULES_DIR="${MODULES_DIR}" -EXECDIR="$EXECDIR" -FIXam="$FIXam" -FIXclim="$FIXclim" -FIXLAM="$FIXLAM" -FIXgsm="$FIXgsm" -FIXaer="$FIXaer" -FIXlut="$FIXlut" -COMROOT="$COMROOT" -COMOUT_BASEDIR="${COMOUT_BASEDIR}" -TEMPLATE_DIR="${TEMPLATE_DIR}" -VX_CONFIG_DIR="${VX_CONFIG_DIR}" -METPLUS_CONF="${METPLUS_CONF}" -MET_CONFIG="${MET_CONFIG}" -UFS_WTHR_MDL_DIR="${UFS_WTHR_MDL_DIR}" -UFS_UTILS_DIR="${UFS_UTILS_DIR}" -SFC_CLIMO_INPUT_DIR="${SFC_CLIMO_INPUT_DIR}" -TOPO_DIR="${TOPO_DIR}" -UPP_DIR="${UPP_DIR}" - -EXPTDIR="$EXPTDIR" -LOGDIR="$LOGDIR" -CYCLE_BASEDIR="${CYCLE_BASEDIR}" -GRID_DIR="${GRID_DIR}" -OROG_DIR="${OROG_DIR}" -SFC_CLIMO_DIR="${SFC_CLIMO_DIR}" - -NDIGITS_ENSMEM_NAMES="${NDIGITS_ENSMEM_NAMES}" -ENSMEM_NAMES=( $( printf "\"%s\" " "${ENSMEM_NAMES[@]}" )) -FV3_NML_ENSMEM_FPS=( $( printf "\"%s\" " "${FV3_NML_ENSMEM_FPS[@]}" )) +SR_WX_APP_TOP_DIR='${SR_WX_APP_TOP_DIR}' +HOMErrfs='$HOMErrfs' +USHDIR='$USHDIR' +SCRIPTSDIR='$SCRIPTSDIR' +JOBSDIR='$JOBSDIR' +SORCDIR='$SORCDIR' +SRC_DIR='${SRC_DIR}' +PARMDIR='$PARMDIR' +MODULES_DIR='${MODULES_DIR}' 
+EXECDIR='$EXECDIR' +FIXam='$FIXam' +FIXclim='$FIXclim' +FIXLAM='$FIXLAM' +FIXgsm='$FIXgsm' +FIXaer='$FIXaer' +FIXlut='$FIXlut' +COMROOT='$COMROOT' +COMOUT_BASEDIR='${COMOUT_BASEDIR}' +TEMPLATE_DIR='${TEMPLATE_DIR}' +VX_CONFIG_DIR='${VX_CONFIG_DIR}' +METPLUS_CONF='${METPLUS_CONF}' +MET_CONFIG='${MET_CONFIG}' +UFS_WTHR_MDL_DIR='${UFS_WTHR_MDL_DIR}' +UFS_UTILS_DIR='${UFS_UTILS_DIR}' +SFC_CLIMO_INPUT_DIR='${SFC_CLIMO_INPUT_DIR}' +TOPO_DIR='${TOPO_DIR}' +UPP_DIR='${UPP_DIR}' + +EXPTDIR='$EXPTDIR' +LOGDIR='$LOGDIR' +CYCLE_BASEDIR='${CYCLE_BASEDIR}' +GRID_DIR='${GRID_DIR}' +OROG_DIR='${OROG_DIR}' +SFC_CLIMO_DIR='${SFC_CLIMO_DIR}' + +NDIGITS_ENSMEM_NAMES='${NDIGITS_ENSMEM_NAMES}' +ENSMEM_NAMES=${ensmem_names_str} +FV3_NML_ENSMEM_FPS=${fv3_nml_ensmem_fps_str} # #----------------------------------------------------------------------- # @@ -2938,46 +2896,43 @@ FV3_NML_ENSMEM_FPS=( $( printf "\"%s\" " "${FV3_NML_ENSMEM_FPS[@]}" )) # #----------------------------------------------------------------------- # -GLOBAL_VAR_DEFNS_FP="${GLOBAL_VAR_DEFNS_FP}" -# Try this at some point instead of hard-coding it as above; it's a more -# flexible approach (if it works). 
-#GLOBAL_VAR_DEFNS_FP=$( $READLINK -f "${BASH_SOURCE[0]}" ) +GLOBAL_VAR_DEFNS_FP='${GLOBAL_VAR_DEFNS_FP}' -DATA_TABLE_TMPL_FN="${DATA_TABLE_TMPL_FN}" -DIAG_TABLE_TMPL_FN="${DIAG_TABLE_TMPL_FN}" -FIELD_TABLE_TMPL_FN="${FIELD_TABLE_TMPL_FN}" -MODEL_CONFIG_TMPL_FN="${MODEL_CONFIG_TMPL_FN}" -NEMS_CONFIG_TMPL_FN="${NEMS_CONFIG_TMPL_FN}" +DATA_TABLE_TMPL_FN='${DATA_TABLE_TMPL_FN}' +DIAG_TABLE_TMPL_FN='${DIAG_TABLE_TMPL_FN}' +FIELD_TABLE_TMPL_FN='${FIELD_TABLE_TMPL_FN}' +MODEL_CONFIG_TMPL_FN='${MODEL_CONFIG_TMPL_FN}' +NEMS_CONFIG_TMPL_FN='${NEMS_CONFIG_TMPL_FN}' -DATA_TABLE_TMPL_FP="${DATA_TABLE_TMPL_FP}" -DIAG_TABLE_TMPL_FP="${DIAG_TABLE_TMPL_FP}" -FIELD_TABLE_TMPL_FP="${FIELD_TABLE_TMPL_FP}" -FV3_NML_BASE_SUITE_FP="${FV3_NML_BASE_SUITE_FP}" -FV3_NML_YAML_CONFIG_FP="${FV3_NML_YAML_CONFIG_FP}" -FV3_NML_BASE_ENS_FP="${FV3_NML_BASE_ENS_FP}" -MODEL_CONFIG_TMPL_FP="${MODEL_CONFIG_TMPL_FP}" -NEMS_CONFIG_TMPL_FP="${NEMS_CONFIG_TMPL_FP}" +DATA_TABLE_TMPL_FP='${DATA_TABLE_TMPL_FP}' +DIAG_TABLE_TMPL_FP='${DIAG_TABLE_TMPL_FP}' +FIELD_TABLE_TMPL_FP='${FIELD_TABLE_TMPL_FP}' +FV3_NML_BASE_SUITE_FP='${FV3_NML_BASE_SUITE_FP}' +FV3_NML_YAML_CONFIG_FP='${FV3_NML_YAML_CONFIG_FP}' +FV3_NML_BASE_ENS_FP='${FV3_NML_BASE_ENS_FP}' +MODEL_CONFIG_TMPL_FP='${MODEL_CONFIG_TMPL_FP}' +NEMS_CONFIG_TMPL_FP='${NEMS_CONFIG_TMPL_FP}' -CCPP_PHYS_SUITE_FN="${CCPP_PHYS_SUITE_FN}" -CCPP_PHYS_SUITE_IN_CCPP_FP="${CCPP_PHYS_SUITE_IN_CCPP_FP}" -CCPP_PHYS_SUITE_FP="${CCPP_PHYS_SUITE_FP}" +CCPP_PHYS_SUITE_FN='${CCPP_PHYS_SUITE_FN}' +CCPP_PHYS_SUITE_IN_CCPP_FP='${CCPP_PHYS_SUITE_IN_CCPP_FP}' +CCPP_PHYS_SUITE_FP='${CCPP_PHYS_SUITE_FP}' -FIELD_DICT_FN="${FIELD_DICT_FN}" -FIELD_DICT_IN_UWM_FP="${FIELD_DICT_IN_UWM_FP}" -FIELD_DICT_FP="${FIELD_DICT_FP}" +FIELD_DICT_FN='${FIELD_DICT_FN}' +FIELD_DICT_IN_UWM_FP='${FIELD_DICT_IN_UWM_FP}' +FIELD_DICT_FP='${FIELD_DICT_FP}' -DATA_TABLE_FP="${DATA_TABLE_FP}" -FIELD_TABLE_FP="${FIELD_TABLE_FP}" -FV3_NML_FN="${FV3_NML_FN}" # This may not be necessary... 
-FV3_NML_FP="${FV3_NML_FP}" -NEMS_CONFIG_FP="${NEMS_CONFIG_FP}" +DATA_TABLE_FP='${DATA_TABLE_FP}' +FIELD_TABLE_FP='${FIELD_TABLE_FP}' +FV3_NML_FN='${FV3_NML_FN}' +FV3_NML_FP='${FV3_NML_FP}' +NEMS_CONFIG_FP='${NEMS_CONFIG_FP}' -FV3_EXEC_FP="${FV3_EXEC_FP}" +FV3_EXEC_FP='${FV3_EXEC_FP}' -LOAD_MODULES_RUN_TASK_FP="${LOAD_MODULES_RUN_TASK_FP}" +LOAD_MODULES_RUN_TASK_FP='${LOAD_MODULES_RUN_TASK_FP}' -THOMPSON_MP_CLIMO_FN="${THOMPSON_MP_CLIMO_FN}" -THOMPSON_MP_CLIMO_FP="${THOMPSON_MP_CLIMO_FP}" +THOMPSON_MP_CLIMO_FN='${THOMPSON_MP_CLIMO_FN}' +THOMPSON_MP_CLIMO_FP='${THOMPSON_MP_CLIMO_FP}' # #----------------------------------------------------------------------- # @@ -2985,115 +2940,99 @@ THOMPSON_MP_CLIMO_FP="${THOMPSON_MP_CLIMO_FP}" # #----------------------------------------------------------------------- # -RELATIVE_LINK_FLAG="${RELATIVE_LINK_FLAG}" +RELATIVE_LINK_FLAG='${RELATIVE_LINK_FLAG}' # #----------------------------------------------------------------------- # # Parameters that indicate whether or not various parameterizations are -# included in and called by the phsics suite. +# included in and called by the physics suite. # #----------------------------------------------------------------------- # -SDF_USES_RUC_LSM="${SDF_USES_RUC_LSM}" -SDF_USES_THOMPSON_MP="${SDF_USES_THOMPSON_MP}" +SDF_USES_RUC_LSM='${SDF_USES_RUC_LSM}' +SDF_USES_THOMPSON_MP='${SDF_USES_THOMPSON_MP}' # #----------------------------------------------------------------------- # -# Grid configuration parameters needed regardless of grid generation me- -# thod used. +# Grid configuration parameters needed regardless of grid generation +# method used. 
# #----------------------------------------------------------------------- # -GTYPE="$GTYPE" -TILE_RGNL="${TILE_RGNL}" -NH0="${NH0}" -NH3="${NH3}" -NH4="${NH4}" +GTYPE='$GTYPE' +TILE_RGNL='${TILE_RGNL}' +NH0='${NH0}' +NH3='${NH3}' +NH4='${NH4}' -LON_CTR="${LON_CTR}" -LAT_CTR="${LAT_CTR}" -NX="${NX}" -NY="${NY}" -NHW="${NHW}" -STRETCH_FAC="${STRETCH_FAC}" +LON_CTR='${LON_CTR}' +LAT_CTR='${LAT_CTR}' +NX='${NX}' +NY='${NY}' +NHW='${NHW}' +STRETCH_FAC='${STRETCH_FAC}' -RES_IN_FIXLAM_FILENAMES="${RES_IN_FIXLAM_FILENAMES}" +RES_IN_FIXLAM_FILENAMES='${RES_IN_FIXLAM_FILENAMES}' # -# If running the make_grid task, CRES will be set to a null string du- -# the grid generation step. It will later be set to an actual value af- -# ter the make_grid task is complete. +# If running the make_grid task, CRES will be set to a null string during +# the grid generation step. It will later be set to an actual value after +# the make_grid task is complete. # -CRES="$CRES" -EOM -} || print_err_msg_exit "\
-Heredoc (cat) command to append new variable definitions to variable -definitions file returned with a nonzero status." +CRES='$CRES' +" # #----------------------------------------------------------------------- # -# Append to the variable definitions file the defintions of grid parame- -# ters that are specific to the grid generation method used. +# Append to the variable definitions file the definitions of grid parameters +# that are specific to the grid generation method used. # #----------------------------------------------------------------------- # +grid_vars_str="" if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - { cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} + grid_vars_str="\
# #----------------------------------------------------------------------- # # Grid configuration parameters for a regional grid generated from a -# global parent cubed-sphere grid.
This is the method originally sug- -# gested by GFDL since it allows GFDL's nested grid generator to be used -# to generate a regional grid. However, for large regional domains, it -# results in grids that have an unacceptably large range of cell sizes +# global parent cubed-sphere grid. This is the method originally +# suggested by GFDL since it allows GFDL's nested grid generator to be +# used to generate a regional grid. However, for large regional domains, +# it results in grids that have an unacceptably large range of cell sizes # (i.e. ratio of maximum to minimum cell size is not sufficiently close # to 1). # #----------------------------------------------------------------------- # -ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" -IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" -JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" -JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" -EOM -} || print_err_msg_exit "\ -Heredoc (cat) command to append grid parameters to variable definitions -file returned with a nonzero status." +ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' +IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' +JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' +JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' +" elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then - { cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} + grid_vars_str="\ # #----------------------------------------------------------------------- # -# Grid configuration parameters for a regional grid generated indepen- -# dently of a global parent grid. This method was developed by Jim Pur- -# ser of EMC and results in very uniform grids (i.e. ratio of maximum to -# minimum cell size is very close to 1). 
+# Grid configuration parameters for a regional grid generated independently +# of a global parent grid. This method was developed by Jim Purser of +# EMC and results in very uniform grids (i.e. ratio of maximum to minimum +# cell size is very close to 1). # #----------------------------------------------------------------------- # -DEL_ANGLE_X_SG="${DEL_ANGLE_X_SG}" -DEL_ANGLE_Y_SG="${DEL_ANGLE_Y_SG}" -NEG_NX_OF_DOM_WITH_WIDE_HALO="${NEG_NX_OF_DOM_WITH_WIDE_HALO}" -NEG_NY_OF_DOM_WITH_WIDE_HALO="${NEG_NY_OF_DOM_WITH_WIDE_HALO}" -PAZI="${PAZI}" -EOM -} || print_err_msg_exit "\ -Heredoc (cat) command to append grid parameters to variable definitions -file returned with a nonzero status." +DEL_ANGLE_X_SG='${DEL_ANGLE_X_SG}' +DEL_ANGLE_Y_SG='${DEL_ANGLE_Y_SG}' +NEG_NX_OF_DOM_WITH_WIDE_HALO='${NEG_NX_OF_DOM_WITH_WIDE_HALO}' +NEG_NY_OF_DOM_WITH_WIDE_HALO='${NEG_NY_OF_DOM_WITH_WIDE_HALO}' +PAZI='${PAZI}' +" fi -# -#----------------------------------------------------------------------- -# -# Because RUN_CMD_FCST can include PE_MEMBER01 (and theoretically other -# variables calculated in this script), delete the first occurrence of it -# in the var_defns file, and write it again at the end. 
-# -#----------------------------------------------------------------------- -$SED -i '/^RUN_CMD_FCST=/d' $GLOBAL_VAR_DEFNS_FP +var_defns_file_contents="${var_defns_file_contents}${grid_vars_str}" # #----------------------------------------------------------------------- # @@ -3102,15 +3041,22 @@ $SED -i '/^RUN_CMD_FCST=/d' $GLOBAL_VAR_DEFNS_FP # #----------------------------------------------------------------------- # -{ cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} +lbc_spec_fcst_hrs_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${LBC_SPEC_FCST_HRS[@]}") +lbc_spec_fcst_hrs_str=$(printf "( %s${escbksl_nl_or_null})" "${lbc_spec_fcst_hrs_str}") + +all_cdates_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${ALL_CDATES[@]}") +all_cdates_str=$(printf "( %s${escbksl_nl_or_null})" "${all_cdates_str}") + +var_defns_file_contents=${var_defns_file_contents}"\ # #----------------------------------------------------------------------- # -# CPL: parameter for coupling in model_configure +# Flag in the \"${MODEL_CONFIG_FN}\" file for coupling the ocean model to +# the weather model. # #----------------------------------------------------------------------- # -CPL="${CPL}" +CPL='${CPL}' # #----------------------------------------------------------------------- # @@ -3119,11 +3065,11 @@ CPL="${CPL}" # #----------------------------------------------------------------------- # -OZONE_PARAM="${OZONE_PARAM}" +OZONE_PARAM='${OZONE_PARAM}' # #----------------------------------------------------------------------- # -# If USE_USER_STAGED_EXTRN_FILES is set to "FALSE", this is the system +# If USE_USER_STAGED_EXTRN_FILES is set to \"FALSE\", this is the system # directory in which the workflow scripts will look for the files generated # by the external model specified in EXTRN_MDL_NAME_ICS. 
These files will # be used to generate the input initial condition and surface files for @@ -3131,11 +3077,11 @@ OZONE_PARAM="${OZONE_PARAM}" # #----------------------------------------------------------------------- # -EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS}" +EXTRN_MDL_SYSBASEDIR_ICS='${EXTRN_MDL_SYSBASEDIR_ICS}' # #----------------------------------------------------------------------- # -# If USE_USER_STAGED_EXTRN_FILES is set to "FALSE", this is the system +# If USE_USER_STAGED_EXTRN_FILES is set to \"FALSE\", this is the system # directory in which the workflow scripts will look for the files generated # by the external model specified in EXTRN_MDL_NAME_LBCS. These files # will be used to generate the input lateral boundary condition files for @@ -3143,7 +3089,7 @@ EXTRN_MDL_SYSBASEDIR_ICS="${EXTRN_MDL_SYSBASEDIR_ICS}" # #----------------------------------------------------------------------- # -EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS}" +EXTRN_MDL_SYSBASEDIR_LBCS='${EXTRN_MDL_SYSBASEDIR_LBCS}' # #----------------------------------------------------------------------- # @@ -3152,7 +3098,7 @@ EXTRN_MDL_SYSBASEDIR_LBCS="${EXTRN_MDL_SYSBASEDIR_LBCS}" # #----------------------------------------------------------------------- # -EXTRN_MDL_LBCS_OFFSET_HRS="${EXTRN_MDL_LBCS_OFFSET_HRS}" +EXTRN_MDL_LBCS_OFFSET_HRS='${EXTRN_MDL_LBCS_OFFSET_HRS}' # #----------------------------------------------------------------------- # @@ -3161,31 +3107,33 @@ EXTRN_MDL_LBCS_OFFSET_HRS="${EXTRN_MDL_LBCS_OFFSET_HRS}" # #----------------------------------------------------------------------- # -LBC_SPEC_FCST_HRS=(${LBC_SPEC_FCST_HRS[@]}) +LBC_SPEC_FCST_HRS=${lbc_spec_fcst_hrs_str} # #----------------------------------------------------------------------- # -# The number of cycles for which to make forecasts and the list of starting -# dates/hours of these cycles. 
+# The number of cycles for which to make forecasts and the list of +# starting dates/hours of these cycles. # #----------------------------------------------------------------------- # -NUM_CYCLES="${NUM_CYCLES}" -ALL_CDATES=( \\ -$( printf "\"%s\" \\\\\n" "${ALL_CDATES[@]}" ) -) +NUM_CYCLES='${NUM_CYCLES}' +ALL_CDATES=${all_cdates_str} # #----------------------------------------------------------------------- # -# If USE_FVCOM is set to TRUE, then FVCOM data (located in FVCOM_DIR -# in FVCOM_FILE) will be used to update lower boundary conditions during -# make_ics. +# Parameters that determine whether FVCOM data will be used, and if so, +# their location. +# +# If USE_FVCOM is set to \"TRUE\", then FVCOM data (in the file FVCOM_FILE +# located in the directory FVCOM_DIR) will be used to update the surface +# boundary conditions during the initial conditions generation task +# (MAKE_ICS_TN). # #----------------------------------------------------------------------- # -USE_FVCOM="${USE_FVCOM}" -FVCOM_DIR="${FVCOM_DIR}" -FVCOM_FILE="${FVCOM_FILE}" +USE_FVCOM='${USE_FVCOM}' +FVCOM_DIR='${FVCOM_DIR}' +FVCOM_FILE='${FVCOM_FILE}' # #----------------------------------------------------------------------- # @@ -3193,22 +3141,26 @@ FVCOM_FILE="${FVCOM_FILE}" # #----------------------------------------------------------------------- # -NCORES_PER_NODE="${NCORES_PER_NODE}" -PE_MEMBER01="${PE_MEMBER01}" -RUN_CMD_FCST="$(eval echo \'${RUN_CMD_FCST}\')" +PE_MEMBER01='${PE_MEMBER01}' # #----------------------------------------------------------------------- # -# IF DO_SPP="TRUE," N_VAR_SPP is the number of parameterizations that -# are perturbed with SPP, otherwise N_VAR_SPP=0. +# IF DO_SPP is set to \"TRUE\", N_VAR_SPP specifies the number of physics +# parameterizations that are perturbed with SPP. Otherwise, N_VAR_SPP +# is set to 0.
# #----------------------------------------------------------------------- # -N_VAR_SPP="${N_VAR_SPP}" -EOM -} || print_err_msg_exit "\ -Heredoc (cat) command to append new variable definitions to variable -definitions file returned with a nonzero status." +N_VAR_SPP='${N_VAR_SPP}' +" +# +# Done with constructing the contents of the variable definitions file, +# so now write the contents to file. +# +printf "%s\n" "${var_defns_file_contents}" >> ${GLOBAL_VAR_DEFNS_FP} + +print_info_msg "$VERBOSE" " +Done generating the global experiment variable definitions file." # #----------------------------------------------------------------------- # diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh index 375543d354..60162aa40f 100644 --- a/ush/source_util_funcs.sh +++ b/ush/source_util_funcs.sh @@ -211,6 +211,15 @@ function source_util_funcs() { #----------------------------------------------------------------------- # . ${bashutils_dir}/create_symlink_to_file.sh +# +#----------------------------------------------------------------------- +# +# Source the file containing the function that gets the stripped contents +# of a bash script or function. +# +#----------------------------------------------------------------------- +# + . ${bashutils_dir}/get_bash_file_contents.sh } source_util_funcs