diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index fcf5836c7c..64a45b1808 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -16,7 +16,8 @@ pipeline { choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion'], description: 'Specify the platform(s) to use') // Allow job runner to filter based on compiler choice(name: 'SRW_COMPILER_FILTER', choices: ['all', 'gnu', 'intel'], description: 'Specify the compiler(s) to use to build') - booleanParam name: 'SRW_WE2E_COMPREHENSIVE_TESTS', defaultValue: false, description: 'Whether to execute the comprehensive end-to-end tests' + // Uncomment the following line to re-enable comprehensive tests + // booleanParam name: 'SRW_WE2E_COMPREHENSIVE_TESTS', defaultValue: false, description: 'Whether to execute the comprehensive end-to-end tests' } stages { @@ -89,7 +90,7 @@ pipeline { exclude { axis { name 'SRW_PLATFORM' - values 'gaea', 'hera', 'jet', 'orion' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1' + values 'gaea', 'jet', 'orion' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1' } axis { @@ -99,6 +100,10 @@ pipeline { } } + agent { + label env.SRW_PLATFORM + } + environment { BRANCH_NAME_ESCAPED = env.BRANCH_NAME.replace('/', '_') BUILD_VERSION = "${env.SRW_PLATFORM}-${env.SRW_COMPILER}-${env.BRANCH_NAME_ESCAPED}-${env.BUILD_NUMBER}" @@ -109,12 +114,8 @@ pipeline { stages { // Clean the workspace, checkout the repository, and run checkout_externals stage('Initialize') { - agent { - label env.SRW_PLATFORM - } - steps { - echo "Initializing SRW (${env.SRW_COMPILER}) build environment on ${env.SRW_PLATFORM}" + echo "Initializing SRW (${env.SRW_COMPILER}) build environment on ${env.SRW_PLATFORM} (using ${env.WORKSPACE})" cleanWs() checkout scm sh '"${WORKSPACE}/manage_externals/checkout_externals"' @@ -123,12 +124,8 @@ pipeline { // Run the unified build script; if successful create a tarball of the build and upload to S3 stage('Build') { - agent { - label env.SRW_PLATFORM - } - steps { - echo "Building SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM}" + echo "Building SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE})" sh 'bash --login "${WORKSPACE}/.cicd/scripts/srw_build.sh"' } @@ -142,17 +139,18 @@ pipeline { // Run the unified test script stage('Test') { - agent { - label env.SRW_PLATFORM - } - environment { SRW_WE2E_EXPERIMENT_BASE_DIR = "${env.WORKSPACE}/expt_dirs" } steps { - echo "Testing SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM}" + echo "Testing SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE})" + + // Remove the following line to re-enable comprehensive tests + sh 'SRW_WE2E_COMPREHENSIVE_TESTS=false bash --login "${WORKSPACE}/.cicd/scripts/srw_test.sh"' + // Uncomment the following block to re-enable comprehensive tests + /* // If executing for a Pull Request, check for the run_we2e_comprehensive_tests. 
If set, // override the value of the SRW_WE2E_COMPREHENSIVE_TESTS parameter script { @@ -169,12 +167,13 @@ pipeline { sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests}" + ' bash --login "${WORKSPACE}/.cicd/scripts/srw_test.sh"' } + */ } post { always { // Archive the test log files - sh 'cd "${SRW_WE2E_EXPERIMENT_BASE_DIR}" && tar --create --gzip --verbose --file "${WORKSPACE}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" */log.generate_FV3LAM_wflow */log.launch_FV3LAM_wflow */log/*' + sh 'cd "${SRW_WE2E_EXPERIMENT_BASE_DIR}" && tar --create --gzip --verbose --dereference --file "${WORKSPACE}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" */log.generate_FV3LAM_wflow */log.launch_FV3LAM_wflow */log/*' // Remove the data sets from the experiments directory to conserve disk space sh 'find "${SRW_WE2E_EXPERIMENT_BASE_DIR}" -regextype posix-extended -regex "^.*(orog|[0-9]{10})$" -type d | xargs rm -rf' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'woc-epic-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 'we2e_test_results-*-*.txt', storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'woc-epic-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 'we2e_test_logs-*-*.tgz', storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] diff --git a/.cicd/scripts/srw_build.sh b/.cicd/scripts/srw_build.sh index e55c3f2fc5..10327366f0 100755 --- a/.cicd/scripts/srw_build.sh +++ b/.cicd/scripts/srw_build.sh @@ -25,8 +25,11 @@ else fi # Build and install -cd ${workspace}/test +cd ${workspace}/tests +set +e ./build.sh ${platform} ${SRW_COMPILER} +build_exit=$? +set -e cd - # Create combined log file for upload to s3 @@ -34,3 +37,4 @@ build_dir="${workspace}/build_${SRW_COMPILER}" cat ${build_dir}/log.cmake ${build_dir}/log.make \ >${build_dir}/srw_build-${platform}-${SRW_COMPILER}.log +exit $build_exit diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh index e884a66aad..8df2ff5c2f 100755 --- a/.cicd/scripts/srw_test.sh +++ b/.cicd/scripts/srw_test.sh @@ -28,6 +28,7 @@ fi # Test directories we2e_experiment_base_dir="${workspace}/expt_dirs" we2e_test_dir="${workspace}/tests/WE2E" +nco_dir="${workspace}/nco_dirs" # Run the end-to-end tests. if "${SRW_WE2E_COMPREHENSIVE_TESTS}"; then @@ -37,15 +38,26 @@ else fi cd ${we2e_test_dir} -./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} ${we2e_experiment_base_dir} +./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \ + expt_basedir=${we2e_experiment_base_dir} \ + opsroot=${nco_dir} -# Allow the tests to start before checking for status. -# TODO: Create a parameter that sets the initial start delay. -sleep 300 +# Run the new run_srw_tests script if the machine is Cheyenne. 
+if [[ "${platform}" = "cheyenne" ]]; then + cd ${workspace}/ush + ./run_srw_tests.py -e=${we2e_experiment_base_dir} + cd ${we2e_test_dir} +fi # Progress file progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt" +# Allow the tests to start before checking for status. +# TODO: Create a parameter that sets the initial start delay. +if [[ "${platform}" != "cheyenne" ]]; then + sleep 300 +fi + # Wait for all tests to complete. while true; do @@ -71,10 +83,15 @@ done # TODO: Create parameter that sets the interval for the we2e cron jobs; this # value should be some factor of that interval to ensure the cron jobs execute # before the workspace is cleaned up. -sleep 600 +if [[ "${platform}" != "cheyenne" ]]; then + sleep 600 +fi # Set exit code to number of failures set +e failures=$(grep "Workflow status: FAILURE" ${progress_file} | wc -l) +if [[ $failures -ne 0 ]]; then + failures=1 +fi set -e exit ${failures} diff --git a/Externals.cfg b/Externals.cfg index 926839020b..04d60aa80c 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. #branch = develop -hash = 52072c5 +hash = 84b28ec local_path = sorc/ufs-weather-model required = True diff --git a/README.md b/README.md index 6956aa7a40..d4268e5e80 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,18 @@ # UFS Short-Range Weather Application -The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. It is designed to be the source system for NOAA’s operational numerical weather prediction applications while enabling research, development, and contribution opportunities for the broader weather enterprise. For more information about the UFS, visit the UFS Portal at https://ufscommunity.org/. +The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. It is designed to be the source system for NOAA’s operational numerical weather prediction applications while enabling research, development, and contribution opportunities for the broader Weather Enterprise. For more information about the UFS, visit the UFS Portal at https://ufscommunity.org/. -The UFS includes multiple applications (see a complete list at https://ufscommunity.org/science/aboutapps/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.0.0) represents a snapshot of this continuously evolving system. +The UFS includes multiple applications (see a complete list at https://ufscommunity.org/science/aboutapps/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.1.0) represents a snapshot of this continuously evolving system. 
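The CI script changes above rework how exit statuses reach Jenkins: `.cicd/scripts/srw_build.sh` now captures the `build.sh` return code across the log-concatenation step, and `.cicd/scripts/srw_test.sh` collapses the WE2E failure count into a single non-zero exit code. A minimal sketch of both patterns is shown below; the `build.sh` invocation, log names, and `progress_file` variable are placeholders standing in for the values used in the scripts.

```bash
#!/bin/bash
# Sketch of the exit-status handling added in .cicd/scripts/srw_build.sh and
# .cicd/scripts/srw_test.sh; commands and file names are placeholders.
set -e

# srw_build.sh pattern: suspend errexit around the build so the cmake/make logs
# can still be concatenated afterwards, then re-raise the saved status.
set +e
./build.sh "${platform}" "${SRW_COMPILER}"
build_exit=$?
set -e
cat log.cmake log.make > srw_build.log        # stands in for the real log merge
# exit ${build_exit}                           # srw_build.sh ends here

# srw_test.sh pattern: count failed workflows, then clamp the count to 1 so the
# result is always a meaningful shell exit code.
set +e
failures=$(grep "Workflow status: FAILURE" "${progress_file}" | wc -l)
set -e
if [[ ${failures} -ne 0 ]]; then
  failures=1
fi
exit ${failures}
```

Clamping the failure count matters because a shell exit status is taken modulo 256, so a raw count of, say, 256 failing workflows would otherwise report success.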
-The UFS SRW App User's Guide associated with the development branch is at: https://ufs-srweather-app.readthedocs.io/en/develop/, while the guide specific to the SRW App v2.0.0 release can be found at: https://ufs-srweather-app.readthedocs.io/en/release-public-v2/. The repository is at: https://github.com/ufs-community/ufs-srweather-app. +The UFS SRW App User's Guide associated with the development branch is at: https://ufs-srweather-app.readthedocs.io/en/develop/, while the guide specific to the SRW App v2.1.0 release can be found at: https://ufs-srweather-app.readthedocs.io/en/release-public-v2.1.0/. The repository is at: https://github.com/ufs-community/ufs-srweather-app. For instructions on how to clone the repository, build the code, and run the workflow, see: https://github.com/ufs-community/ufs-srweather-app/wiki/Getting-Started -UFS Development Team. (2022, June 23). Unified Forecast System (UFS) Short-Range Weather (SRW) Application (Version v2.0.0). Zenodo. https://doi.org/10.5281/zenodo.6505854 +For a debugging guide for users and developers in the field of Earth System Modeling, please see: +https://epic.noaa.gov/wp-content/uploads/2022/12/Debugging-Guide.pdf + +UFS Development Team. (2022, Nov. 17). Unified Forecast System (UFS) Short-Range Weather (SRW) Application (Version v2.1.0). Zenodo. https://doi.org/10.5281/zenodo.7277602 [![Python unittests](https://github.com/ufs-community/ufs-srweather-app/actions/workflows/python_unittests.yaml/badge.svg)](https://github.com/ufs-community/ufs-srweather-app/actions/workflows/python_unittests.yaml) [![Python functional tests](https://github.com/ufs-community/ufs-srweather-app/actions/workflows/python_func_tests.yaml/badge.svg)](https://github.com/ufs-community/ufs-srweather-app/actions/workflows/python_func_tests.yaml) diff --git a/devbuild.sh b/devbuild.sh index 045611d17d..d31ed0eca7 100755 --- a/devbuild.sh +++ b/devbuild.sh @@ -33,6 +33,8 @@ OPTIONS does a "make clean" --build does a "make" (build only) + --move + move binaries to final location. --build-dir=BUILD_DIR build directory --install-dir=INSTALL_DIR @@ -125,6 +127,7 @@ BUILD_RRFS_UTILS="off" # Make options CLEAN=false BUILD=false +MOVE=false USE_SUB_MODULES=false #change default to true later # process required arguments @@ -155,6 +158,7 @@ while :; do --continue=?*|--continue=) usage_error "$1 argument ignored." ;; --clean) CLEAN=true ;; --build) BUILD=true ;; + --move) MOVE=true ;; --build-dir=?*) BUILD_DIR=${1#*=} ;; --build-dir|--build-dir=) usage_error "$1 requires argument." ;; --install-dir=?*) INSTALL_DIR=${1#*=} ;; @@ -245,7 +249,7 @@ fi # set MODULE_FILE for this platform/compiler combination MODULE_FILE="build_${PLATFORM}_${COMPILER}" -if [ ! -f "${SRW_DIR}/modulefiles/${MODULE_FILE}" ]; then +if [ ! -f "${SRW_DIR}/modulefiles/${MODULE_FILE}.lua" ]; then printf "ERROR: module file does not exist for platform/compiler\n" >&2 printf " MODULE_FILE=${MODULE_FILE}\n" >&2 printf " PLATFORM=${PLATFORM}\n" >&2 @@ -398,18 +402,31 @@ module list mkdir -p ${BUILD_DIR} cd ${BUILD_DIR} -printf "... Generate CMAKE configuration ...\n" -cmake ${SRW_DIR} ${CMAKE_SETTINGS} 2>&1 | tee log.cmake - if [ "${CLEAN}" = true ]; then - printf "... Clean executables ...\n" - make ${MAKE_SETTINGS} clean 2>&1 | tee log.make + if [ -f $PWD/Makefile ]; then + printf "... Clean executables ...\n" + make ${MAKE_SETTINGS} clean 2>&1 | tee log.make + fi elif [ "${BUILD}" = true ]; then + printf "... 
Generate CMAKE configuration ...\n" + cmake ${SRW_DIR} ${CMAKE_SETTINGS} 2>&1 | tee log.cmake + printf "... Compile executables ...\n" make ${MAKE_SETTINGS} build 2>&1 | tee log.make else + printf "... Generate CMAKE configuration ...\n" + cmake ${SRW_DIR} ${CMAKE_SETTINGS} 2>&1 | tee log.cmake + printf "... Compile and install executables ...\n" make ${MAKE_SETTINGS} install 2>&1 | tee log.make + + if [ "${MOVE}" = true ]; then + if [[ ! ${SRW_DIR} -ef ${INSTALL_DIR} ]]; then + printf "... Moving executables to final locations ...\n" + mkdir -p ${SRW_DIR}/${BIN_DIR} + mv ${INSTALL_DIR}/${BIN_DIR}/* ${SRW_DIR}/${BIN_DIR} + fi + fi fi exit 0 diff --git a/devclean.sh b/devclean.sh new file mode 100755 index 0000000000..36fbc36898 --- /dev/null +++ b/devclean.sh @@ -0,0 +1,148 @@ +#!/bin/bash + +# usage instructions +usage () { +cat << EOF_USAGE + +Clean the UFS-SRW Application build +Usage: $0 [OPTIONS] ... + +OPTIONS + -h, --help + show this help guide + -a, --all + removes "bin", "build" directories, and other build artifacts + --remove + removes the "build" directory, keeps the "bin", "lib" and other build artifacts intact + --clean + removes "bin", "build" directories, and other build artifacts (same as "-a", "--all") + --install-dir=INSTALL_DIR + installation directory name (\${SRW_DIR} by default) + --build-dir=BUILD_DIR + main build directory, absolute path (\${SRW_DIR}/build/ by default) + --bin-dir=BIN_DIR + binary directory name ("exec" by default); full path is \${INSTALL_DIR}/\${BIN_DIR}) + --sub-modules + remove sub-module directories. They will need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds + -v, --verbose + provide more verbose output + +EOF_USAGE +} + +# print settings +settings () { +cat << EOF_SETTINGS +Settings: + + INSTALL_DIR=${INSTALL_DIR} + BUILD_DIR=${BUILD_DIR} + BIN_DIR=${BIN_DIR} + REMOVE=${REMOVE} + VERBOSE=${VERBOSE} + +Default cleaning options: (if no arguments provided, then nothing is cleaned) + REMOVE=${REMOVE} + CLEAN=${CLEAN} + INCLUDE_SUB_MODULES=${INCLUDE_SUB_MODULES} + +EOF_SETTINGS +} + +# print usage error and exit +usage_error () { + printf "ERROR: $1\n" >&2 + usage >&2 + exit 1 +} + +# default settings +SRW_DIR=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )" && pwd -P) +INSTALL_DIR=${INSTALL_DIR:-${SRW_DIR}} +BUILD_DIR=${BUILD_DIR:-"${SRW_DIR}/build"} +BIN_DIR="exec" +REMOVE=false +VERBOSE=false + +# default clean options +REMOVE=false +CLEAN=false +INCLUDE_SUB_MODULES=false #changes to true if '--sub-modules' option is provided + +# process requires arguments +if [[ ("$1" == "--help") || ("$1" == "-h") ]]; then + usage + exit 0 +fi +# process optional arguments +while :; do + case $1 in + --help|-h) usage; exit 0 ;; + --all|-a) ALL_CLEAN=true ;; + --remove) REMOVE=true ;; + --remove=?*|--remove=) usage_error "$1 argument ignored." ;; + --clean) CLEAN=true ;; + --install-dir=?*) INSTALL_DIR=${1#*=} ;; + --install-dir|--install-dir=) usage_error "$1 requires argument." ;; + --build-dir=?*) BUILD_DIR=${1#*=} ;; + --build-dir|--build-dir=) usage_error "$1 requires argument." ;; + --bin-dir=?*) BIN_DIR=${1#*=} ;; + --bin-dir|--bin-dir=) usage_error "$1 requires argument." ;; + --sub-modules) INCLUDE_SUB_MODULES=true ;; + --verbose|-v) VERBOSE=true ;; + --verbose=?*|--verbose=) usage_error "$1 argument ignored." 
;; + # targets + default) ALL_CLEAN=false ;; + # unknown + -?*|?*) usage_error "Unknown option $1" ;; + *) usage; break ;; + esac + shift +done + +# choose defaults to clean +if [ "${ALL_CLEAN}" = true ]; then + CLEAN=true +fi + +# print settings +if [ "${VERBOSE}" = true ] ; then + settings +fi + +# clean if build directory already exists +if [ "${REMOVE}" = true ] && [ "${CLEAN}" = false ] ; then + printf '%s\n' "Remove the \"build\" directory only, BUILD_DIR = $BUILD_DIR " + [[ -d ${BUILD_DIR} ]] && rm -rf ${BUILD_DIR} && printf '%s\n' "rm -rf ${BUILD_DIR}" +elif [ "${CLEAN}" = true ]; then + printf '%s\n' "Remove build directory, bin directory, and other build artifacts " + printf '%s\n' " from the installation directory = ${INSTALL_DIR} " + [[ -d "${BUILD_DIR}" ]] && rm -rf ${BUILD_DIR} && printf '%s\n' "rm -rf ${BUILD_DIR}" + [[ -d "${INSTALL_DIR}/${BIN_DIR}" ]] && ( rm -rf ${INSTALL_DIR}/${BIN_DIR} && printf '%s\n' "rm -rf ${INSTALL_DIR}/${BIN_DIR}" ) + [[ -d "${SRW_DIR}/${BIN_DIR}" ]] && ( rm -rf ${SRW_DIR}/${BIN_DIR} && printf '%s\n' "rm -rf ${SRW_DIR}/${BIN_DIR}" ) + [[ -d "${INSTALL_DIR}/share" ]] && ( rm -rf ${INSTALL_DIR}/share && printf '%s\n' "rm -rf ${INSTALL_DIR}/share" ) + [[ -d "${INSTALL_DIR}/include" ]] && ( rm -rf ${INSTALL_DIR}/include && printf '%s\n' "rm -rf ${INSTALL_DIR}/include" ) + [[ -d "${INSTALL_DIR}/lib" ]] && rm -rf ${INSTALL_DIR}/lib && printf '%s\n' "rm -rf ${INSTALL_DIR}/lib" + [[ -d "${INSTALL_DIR}/lib64" ]] && rm -rf ${INSTALL_DIR}/lib64 && printf '%s\n' "rm -rf ${INSTALL_DIR}/lib64" + [[ -d "${SRW_DIR}/manage_externals/manic" ]] && rm -f ${SRW_DIR}/manage_externals/manic/*.pyc && printf '%s\n' "rm -f ${SRW_DIR}/manage_externals/manic/*.pyc" + echo " " +fi +# Clean all the submodules if requested. NB: Need to check them out again before attempting subsequent builds, by sourcing ${SRW_DIR}/manage_externals/checkout_externals +if [ ${INCLUDE_SUB_MODULES} == true ]; then + printf '%s\n' "Removing submodules ..." + declare -a submodules='()' + submodules=(${SRW_DIR}/sorc/*) +# echo " submodules are: ${submodules[@]} (total of ${#submodules[@]}) " + if [ ${#submodules[@]} -ge 1 ]; then + for sub in ${submodules[@]}; do [[ -d "${sub}" ]] && ( rm -rf ${sub} && printf '%s\n' "rm -rf ${sub}" ); done + fi + printf '%s\n' "NB: Need to check out submodules again for any subsequent builds, " \ + " by sourcing ${SRW_DIR}/manage_externals/checkout_externals " +fi +# +echo " " +echo "All the requested cleaning tasks have been completed" +echo " " + +exit 0 + diff --git a/docs/UsersGuide/requirements.txt b/docs/UsersGuide/requirements.txt index 9c7258463b..7be32f526d 100644 --- a/docs/UsersGuide/requirements.txt +++ b/docs/UsersGuide/requirements.txt @@ -1,2 +1,3 @@ sphinxcontrib-bibtex sphinx_rtd_theme +docutils==0.16 \ No newline at end of file diff --git a/docs/UsersGuide/source/BuildRunSRW.rst b/docs/UsersGuide/source/BuildRunSRW.rst deleted file mode 100644 index 9e0da6016a..0000000000 --- a/docs/UsersGuide/source/BuildRunSRW.rst +++ /dev/null @@ -1,1632 +0,0 @@ -.. _BuildRunSRW: - -===================================== -Building and Running the SRW App -===================================== - -The Unified Forecast System (:term:`UFS`) Short-Range Weather (SRW) Application is an :term:`umbrella repository` consisting of a number of different :ref:`components ` housed in external repositories.
Once the SRW App is built, users can configure an experiment and generate predictions of atmospheric behavior over a limited spatial area and on time scales ranging from minutes out to several days. - -This chapter walks users through how to build and run the "out-of-the-box" case for the SRW App. However, the steps are relevant to any SRW Application experiment and can be modified to suit user goals. The out-of-the-box SRW App case builds a weather forecast for June 15-16, 2019. Multiple convective weather events during these two days produced over 200 filtered storm reports. Severe weather was clustered in two areas: the Upper Midwest through the Ohio Valley and the Southern Great Plains. This forecast uses a predefined 25-km Continental United States (:term:`CONUS`) domain (RRFS_CONUS_25km), the Global Forecast System (:term:`GFS`) version 16 physics suite (FV3_GFS_v16 :term:`CCPP`), and :term:`FV3`-based GFS raw external model data for initialization. - -.. attention:: - - The SRW Application has `four levels of support `__. The steps described in this chapter will work most smoothly on preconfigured (Level 1) systems. This chapter can also serve as a starting point for running the SRW App on other systems (including generic Linux/Mac systems), but the user may need to perform additional troubleshooting. - -.. note:: - The :ref:`container approach ` is recommended for a smoother first-time build and run experience. Building without a container may allow for more customization. However, the non-container approach requires more in-depth system-based knowledge, especially on Level 3 and 4 systems, so it is less appropriate for beginners. - -The overall procedure for generating an experiment is shown in :numref:`Figure %s `, with the scripts to generate and run the workflow shown in red. The steps are as follows: - - #. :ref:`Install prerequisites ` - #. :ref:`Clone the SRW App from GitHub ` - #. :ref:`Check out the external repositories ` - #. :ref:`Set up the build environment and build the executables ` - #. :ref:`Download and stage data ` - #. :ref:`Optional: Configure a new grid ` - #. :ref:`Generate a regional workflow experiment ` - - * :ref:`Configure the experiment parameters ` - * :ref:`Load the python environment for the regional workflow ` - - #. :ref:`Run the regional workflow ` - #. :ref:`Optional: Plot the output ` - -.. _AppOverallProc: - -.. figure:: _static/FV3LAM_wflow_overall.png - :alt: Flowchart describing the SRW App workflow steps. - - *Overall layout of the SRW App Workflow* - - -.. _HPCstackInfo: - -Install the HPC-Stack -======================== - -.. Attention:: - Skip the HPC-Stack installation if working on a `Level 1 system `_ (e.g., Cheyenne, Hera, Orion, NOAA Cloud). - -**Definition:** :term:`HPC-Stack` is a repository that provides a unified, shell script-based build system to build the software stack required for `UFS `_ applications such as the SRW App. - -Background ----------------- - -The UFS Weather Model draws on over 50 code libraries to run its applications. These libraries range from libraries developed in-house at NOAA (e.g., NCEPLIBS, FMS) to libraries developed by NOAA's partners (e.g., PIO, ESMF) to truly third party libraries (e.g., NETCDF). Individual installation of these libraries is not practical, so the `HPC-Stack `__ was developed as a central installation system to ensure that the infrastructure environment across multiple platforms is as similar as possible. Installation of the HPC-Stack is required to run the SRW App. 
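Looking back at the `devbuild.sh` changes earlier in this diff, the new `--move` option relocates freshly installed executables out of the install prefix only when that prefix and the source tree are distinct directories. The sketch below condenses that guard; the variable defaults are illustrative placeholders, not the script's actual settings.

```bash
#!/bin/bash
# Condensed sketch of the --move handling added to devbuild.sh.
# SRW_DIR, INSTALL_DIR, and BIN_DIR defaults below are illustrative placeholders.
SRW_DIR=${SRW_DIR:-$PWD}
INSTALL_DIR=${INSTALL_DIR:-${SRW_DIR}/install}
BIN_DIR=${BIN_DIR:-exec}
MOVE=${MOVE:-true}

if [ "${MOVE}" = true ]; then
  # "-ef" is true when both paths resolve to the same file, so nothing is
  # moved when the install prefix is the source tree itself.
  if [[ ! ${SRW_DIR} -ef ${INSTALL_DIR} ]]; then
    mkdir -p "${SRW_DIR}/${BIN_DIR}"
    mv "${INSTALL_DIR}/${BIN_DIR}"/* "${SRW_DIR}/${BIN_DIR}/"
  fi
fi
```

The `-ef` test compares device and inode, so the move is safely skipped when `--install-dir` already points at the source tree.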
- -Instructions -------------------------- -Users working on systems that fall under `Support Levels 2-4 `__ will need to install the HPC-Stack the first time they try to build applications (such as the SRW App) that depend on it. Users can either build the HPC-Stack on their local system or use the centrally maintained stacks on each HPC platform if they are working on a Level 1 system. Before installing the HPC-Stack, users on both Linux and MacOS systems should set the stack size to "unlimited" (if allowed) or to the largest possible value: - -.. code-block:: console - - # Linux, if allowed - ulimit -s unlimited - - # MacOS, this corresponds to 65MB - ulimit -S -s unlimited - -For a detailed description of installation options, see :ref:`Installing the HPC-Stack `. - -After completing installation, continue to the next section (:numref:`Section %s: Download the UFS SRW Application Code `). - -.. _DownloadSRWApp: - -Download the UFS SRW Application Code -====================================== -The SRW Application source code is publicly available on GitHub. To download the SRW App code, clone the ``develop`` branch of the repository: - -.. code-block:: console - - git clone -b develop https://github.com/ufs-community/ufs-srweather-app.git - -The cloned repository contains the configuration files and sub-directories shown in -:numref:`Table %s `. The user may set an ``$SRW`` environment variable to point to the location of the new ``ufs-srweather-app`` repository. For example, if ``ufs-srweather-app`` was cloned into the ``$HOME`` directory, the following commands will set an ``$SRW`` environment variable in a bash or csh shell, respectively: - -.. code-block:: console - - export SRW=$HOME/ufs-srweather-app - setenv SRW $HOME/ufs-srweather-app - -.. _FilesAndSubDirs: - -.. table:: Files and sub-directories of the ufs-srweather-app repository - - +--------------------------------+--------------------------------------------------------+ - | **File/Directory Name** | **Description** | - +================================+========================================================+ - | CMakeLists.txt | Main CMake file for SRW App | - +--------------------------------+--------------------------------------------------------+ - | Externals.cfg | Includes tags pointing to the correct version of the | - | | external GitHub repositories/branches used in the SRW | - | | App. 
| - +--------------------------------+--------------------------------------------------------+ - | LICENSE.md | CC0 license information | - +--------------------------------+--------------------------------------------------------+ - | README.md | Getting Started Guide | - +--------------------------------+--------------------------------------------------------+ - | ufs_srweather_app_meta.h.in | Meta information for SRW App which can be used by | - | | other packages | - +--------------------------------+--------------------------------------------------------+ - | ufs_srweather_app.settings.in | SRW App configuration summary | - +--------------------------------+--------------------------------------------------------+ - | modulefiles | Contains build and workflow modulefiles | - +--------------------------------+--------------------------------------------------------+ - | etc | Contains Lmod startup scripts | - +--------------------------------+--------------------------------------------------------+ - | docs | Contains release notes, documentation, and User's Guide| - +--------------------------------+--------------------------------------------------------+ - | manage_externals | Utility for checking out external repositories | - +--------------------------------+--------------------------------------------------------+ - | src | Contains CMakeLists.txt; external repositories | - | | will be cloned into this directory. | - +--------------------------------+--------------------------------------------------------+ - - -.. _CheckoutExternals: - -Check Out External Components -================================ - -The SRW App relies on a variety of components (e.g., regional_workflow, UFS_UTILS, ufs-weather-model, and UPP) detailed in :numref:`Chapter %s ` of this User's Guide. Each component has its own repository. Users must run the ``checkout_externals`` script to collect the individual components of the SRW App from their respective Git repositories. The ``checkout_externals`` script uses the configuration file ``Externals.cfg`` in the top level directory of the SRW App to clone the correct tags (code versions) of the external repositories listed in :numref:`Section %s ` into the appropriate directories under the ``regional_workflow`` and ``src`` directories. - -Run the executable that pulls in SRW App components from external repositories: - -.. code-block:: console - - cd $SRW - ./manage_externals/checkout_externals - -The script should output dialogue indicating that it is retrieving different code repositories. It may take several minutes to download these repositories. - - -.. _BuildExecutables: - -Set Up the Environment and Build the Executables -=================================================== - -.. _DevBuild: - -``devbuild.sh`` Approach ------------------------------ - -On Level 1 systems for which a modulefile is provided under the ``modulefiles`` directory, users can build the SRW App binaries with: - -.. code-block:: console - - ./devbuild.sh --platform= - -where ```` is replaced with the name of the platform the user is working on. Valid values are: ``cheyenne`` | ``gaea`` | ``hera`` | ``jet`` | ``linux`` | ``macos`` | ``noaacloud`` | ``odin`` | ``orion`` | ``singularity`` | ``wcoss2`` - -.. note:: - Although build modulefiles exist for generic Linux and MacOS machines, users will need to alter these according to the instructions in Sections :numref:`%s ` & :numref:`%s `. 
It is recommended that users on these systems build the SRW App with the :ref:`CMake Approach ` instead. - -If compiler auto-detection fails for some reason, specify it using the ``--compiler`` argument. For example: - -.. code-block:: console - - ./devbuild.sh --platform=hera --compiler=intel - -where valid values are ``intel`` or ``gnu``. - -If users want to build the optional ``GSI`` and ``rrfs_utl`` components for RRFS (NOTE: These components are not currently available for use at runtime), they can add the ``--rrfs`` argument. For example: - -.. code-block:: console - - ./devbuild.sh --platform=hera --rrfs - -The last line of the console output should be ``[100%] Built target ufs-weather-model``, indicating that the UFS Weather Model executable has been built successfully. - -The executables listed in :numref:`Table %s ` should appear in the ``ufs-srweather-app/bin`` directory. If users choose to build the ``GSI`` and ``rrfs_utl`` components, the executables listed in :numref:`Table %s ` will also appear there. If this build method does not work, or if users are not on a supported machine, they will have to manually setup the environment and build the SRW App binaries with CMake as described in :numref:`Section %s `. - - -.. _ExecDescription: - -.. table:: Names and descriptions of the executables produced by the build step and used by the SRW App - - +------------------------+---------------------------------------------------------------------------------+ - | **Executable Name** | **Description** | - +========================+=================================================================================+ - | chgres_cube | Reads in raw external model (global or regional) and surface climatology data | - | | to create initial and lateral boundary conditions | - +------------------------+---------------------------------------------------------------------------------+ - | emcsfc_ice_blend | Blends National Ice Center sea ice cover and EMC sea ice concentration data to | - | | create a global sea ice analysis used to update the GFS once per day | - +------------------------+---------------------------------------------------------------------------------+ - | emcsfc_snow2mdl | Blends National Ice Center snow cover and Air Force snow depth data to create a | - | | global depth analysis used to update the GFS snow field once per day | - +------------------------+---------------------------------------------------------------------------------+ - | filter_topo | Filters topography based on resolution | - +------------------------+---------------------------------------------------------------------------------+ - | fregrid | Remaps data from the input mosaic grid to the output mosaic grid | - +------------------------+---------------------------------------------------------------------------------+ - | fvcom_to_FV3 | Determines lake surface conditions for the Great Lakes | - +------------------------+---------------------------------------------------------------------------------+ - | global_cycle | Updates the GFS surface conditions using external snow and sea ice analyses | - +------------------------+---------------------------------------------------------------------------------+ - | global_equiv_resol | Calculates a global, uniform, cubed-sphere equivalent resolution for the | - | | regional Extended Schmidt Gnomonic (ESG) grid | - +------------------------+---------------------------------------------------------------------------------+ - | inland | Creates an inland land 
mask by determining in-land (i.e. non-coastal) points | - | | and assigning a value of 1. Default value is 0. | - +------------------------+---------------------------------------------------------------------------------+ - | lakefrac | Calculates the ratio of the lake area to the grid cell area at each atmospheric | - | | grid point. | - +------------------------+---------------------------------------------------------------------------------+ - | make_hgrid | Computes geo-referencing parameters (e.g., latitude, longitude, grid cell area) | - | | for global uniform grids | - +------------------------+---------------------------------------------------------------------------------+ - | make_solo_mosaic | Creates mosaic files with halos | - +------------------------+---------------------------------------------------------------------------------+ - | orog | Generates orography, land mask, and gravity wave drag files from fixed files | - +------------------------+---------------------------------------------------------------------------------+ - | orog_gsl | Ceates orographic statistics fields required for the orographic drag suite | - | | developed by NOAA's Global Systems Laboratory (GSL) | - +------------------------+---------------------------------------------------------------------------------+ - | regional_esg_grid | Generates an ESG regional grid based on a user-defined namelist | - +------------------------+---------------------------------------------------------------------------------+ - | sfc_climo_gen | Creates surface climatology fields from fixed files for use in ``chgres_cube`` | - +------------------------+---------------------------------------------------------------------------------+ - | shave | Shaves the excess halo rows down to what is required for the lateral boundary | - | | conditions (LBC's) in the orography and grid files | - +------------------------+---------------------------------------------------------------------------------+ - | upp.x | Post-processor for the model output | - +------------------------+---------------------------------------------------------------------------------+ - | ufs_model | UFS Weather Model executable | - +------------------------+---------------------------------------------------------------------------------+ - | vcoord_gen | Generates hybrid coordinate interface profiles | - +------------------------+---------------------------------------------------------------------------------+ - -.. _RRFSexec: - -.. table:: Names and descriptions of the executables produced when the RRFS option is enabled - - +----------------------------+-----------------------------------------------------------------------------+ - | **Executable Name** | **Description** | - +============================+=============================================================================+ - | gsi.x | Runs the Gridpoint Statistical Interpolation (GSI). | - +----------------------------+-----------------------------------------------------------------------------+ - | enkf.x | Runs the Ensemble Kalman Filter. | - +----------------------------+-----------------------------------------------------------------------------+ - | gen_annual_maxmin_GVF.exe | Generate maximum and minimum GVF files based on yearly long GVF | - | | observations for update_GVF process. 
| - +----------------------------+-----------------------------------------------------------------------------+ - | update_GVF.exe | Update the greenness vegetation fraction (GVF) in the surface file based on | - | | the real-time observations files. | - +----------------------------+-----------------------------------------------------------------------------+ - | ref2tten.exe | Calculate temperature tendency based on the radar reflectivity observation | - | | at each grid point. This temperature tendency can be used by the model | - | | during integration as latent heating initialization for ongoing | - | | precipitation systems, especially convection. | - +----------------------------+-----------------------------------------------------------------------------+ - | update_ice.exe | Replace ice fields in warm start surface files based on the forecast from | - | | cold start forecast using the GFS as the initial file. | - +----------------------------+-----------------------------------------------------------------------------+ - | process_updatesst.exe | Update SST field based on the SST analysis from NCEP. | - +----------------------------+-----------------------------------------------------------------------------+ - | check_imssnow_fv3lam.exe | This is a tool used to read snow and ice fields from surface files and | - | | check that field. | - +----------------------------+-----------------------------------------------------------------------------+ - | gen_cs.exe | NCL scripts to do cross section plotting. | - +----------------------------+-----------------------------------------------------------------------------+ - | lakesurgery.exe | Replace the existing lake depth with the GLOBathy bathymetry. It is | - | | designed to work with the HRRR model. | - +----------------------------+-----------------------------------------------------------------------------+ - | process_imssnow_fv3lam.exe | Use FV3LAM snow and ice fields based on the snow and ice information from | - | | imssnow. | - +----------------------------+-----------------------------------------------------------------------------+ - | gen_ensmean_recenter.exe | Runs the ensemble mean/recentering calculation for FV3LAM ensemble files. | - +----------------------------+-----------------------------------------------------------------------------+ - | update_bc.exe | Adjust 0-h boundary conditions based on the analysis results during data | - | | assimilation cycling. | - +----------------------------+-----------------------------------------------------------------------------+ - | adjust_soiltq.exe | Use the lowest level temperature and moisture analysis increments to adjust | - | | the soil moisture and soil temperature after analysis. | - +----------------------------+-----------------------------------------------------------------------------+ - | process_NSSL_mosaic.exe | Process NSSL MRMS radar reflectivity mosaic observations: read 33 level | - | | NSSL MRMS radar reflectivity grib2 files and then interpolate the | - | | reflectivity horizontally to the ESG grid. | - +----------------------------+-----------------------------------------------------------------------------+ - | use_raphrrr_sfc.exe | Use RAP and HRRR surface fields to replace the surface fields in FV3LAM. | - | | This is only used for starting the RRFS surface cycling. 
| - +----------------------------+-----------------------------------------------------------------------------+ - | process_Lightning.exe | Processes lightning data: read NLDN NetCDF observation files and map the | - | | lightning observations into FV3LAM ESG grid. | - +----------------------------+-----------------------------------------------------------------------------+ - | process_larccld.exe | Process NASA Langley cloud top product: reads the cloud top pressure, | - | | temperature, etc and maps them to the ESG grid. | - +----------------------------+-----------------------------------------------------------------------------+ - | process_metarcld.exe | Process METAR ceilometers cloud observations: reads the cloud base and | - | | coverage observations from PrepBUFR and distributes the cloud | - | | observations/weather/visibility observations to the ESG grid. | - +----------------------------+-----------------------------------------------------------------------------+ - | fv3lam_nonvarcldana.exe | Runs the non-variational cloud and precipitable hydrometeor analysis based | - | | on the METAR cloud observations, satellite retrieved cloud top products, | - | | and radar reflectivity. | - +----------------------------+-----------------------------------------------------------------------------+ - - - -.. _CMakeApproach: - -CMake Approach ------------------ - -Set Up the Build Environment -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. attention:: - * If users successfully built the executables in :numref:`Table %s `, they should skip to step :numref:`Step %s `. - * Users who want to build the SRW App on a generic MacOS should skip to :numref:`Section %s ` and follow the approach there. - -If the ``devbuild.sh`` approach failed, users need to set up their environment to run a workflow on their specific platform. First, users should make sure ``Lmod`` is the app used for loading modulefiles. This is the case on most Level 1 systems; however, on systems such as Gaea/Odin, the default modulefile loader is from Cray and must be switched to Lmod. For example, on Gaea, users can run one of the following two commands depending on whether they have a bash or csh shell, respectively: - -.. code-block:: console - - source etc/lmod-setup.sh gaea - source etc/lmod-setup.csh gaea - -If users execute one of the above commands on systems that don't need it, it will not cause any problems (it will simply do a ``module purge``). - -From here on, ``Lmod`` is ready to load the modulefiles needed by the SRW App. These modulefiles are located in the ``modulefiles`` directory. To load the necessary modulefile for a specific ```` using a given ````, run: - -.. code-block:: console - - module use - module load build__ - -where ```` is the full path to the ``modulefiles`` directory. - -This will work on Level 1 systems, where a modulefile is available in the ``modulefiles`` directory. On Level 2-4 systems (including generic Linux/MacOS systems), users will need to modify certain environment variables, such as the path to HPC-Stack, so that the SRW App can find and load the appropriate modules. For systems with Lmod installed, one of the current ``build__`` modulefiles can be copied and used as a template. To check whether Lmod is installed, run ``echo $LMOD_PKG``, and see if it outputs a path to the Lmod package. 
On systems without Lmod, users can modify or set the required environment variables with the ``export`` or ``setenv`` commands, depending on whether they are using a bash or csh/tcsh shell, respectively: - -.. code-block:: - - export = - setenv - -Note that building the SRW App without Lmod is not supported at this time. It should be possible to do so, but it has not been tested. Users are encouraged to install Lmod on their system. - -.. _BuildCMake: - -Build the Executables Using CMake -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -After setting up the build environment in the preceding section (by loading the ``build__`` modulefile), users need to build the executables required to run the SRW App. In the ``ufs-srweather-app`` directory, create a subdirectory to hold the build's executables: - -.. code-block:: console - - mkdir build - cd build - -From the build directory, run the following commands to build the pre-processing utilities, forecast model, and post-processor: - -.. code-block:: console - - cmake .. -DCMAKE_INSTALL_PREFIX=.. - make -j 4 >& build.out & - -``-DCMAKE_INSTALL_PREFIX`` specifies the location in which the ``bin``, ``include``, ``lib``, and ``share`` directories will be created. These directories will contain various components of the SRW App. Its recommended value ``..`` denotes one directory up from the build directory. In the next line, the ``make`` call argument ``-j 4`` indicates that the build will run in parallel with 4 threads. Although users can specify a larger or smaller number of threads (e.g., ``-j8``, ``-j2``), it is highly recommended to use at least 4 parallel threads to prevent overly long installation times. - -If users want to build the optional ``GSI`` and ``rrfs_utl`` components for RRFS (NOTE: These components are not currently available for use at runtime), they can add ``ENABLE_RRFS=on`` to the original cmake command. For example: - -.. code-block:: console - - cmake .. -DCMAKE_INSTALL_PREFIX=.. ENABLE_RRFS=on - make -j 4 >& build.out & - -The build will take a few minutes to complete. When it starts, a random number is printed to the console, and when it is done, a ``[1]+ Done`` message is printed to the console. ``[1]+ Exit`` indicates an error. Output from the build will be in the ``ufs-srweather-app/build/build.out`` file. When the build completes, users should see the forecast model executable ``ufs_model`` and several pre- and post-processing executables in the ``ufs-srweather-app/bin`` directory. These executables are described in :numref:`Table %s `. - -.. hint:: - - If you see the ``build.out`` file, but there is no ``ufs-srweather-app/bin`` directory, wait a few more minutes for the build to complete. - -.. _MacDetails: - -Additional Details for Building on MacOS ------------------------------------------- - -.. note:: - Users who are **not** building the SRW App on a MacOS machine may skip to the :ref:`next section `. - -The SRW App can be built on MacOS machines, presuming HPC-Stack has already been installed successfully. 
The following two options have been tested: - -* **Option 1:** MacBookAir 2020, M1 chip (arm64, running natively), 4+4 cores, Big Sur 11.6.4, GNU compiler suite v.11.2.0_3 (gcc, gfortran, g++); no MPI pre-installed - -* **Option 2:** MacBook Pro 2015, 2.8 GHz Quad-Core Intel Core i7 (x86_64), Catalina OS X 10.15.7, GNU compiler suite v.11.2.0_3 (gcc, gfortran, g++); no MPI pre-installed - -The ``build_macos_gnu`` modulefile initializes the module environment, lists the location of HPC-Stack modules, loads the meta-modules and modules, and sets compilers, additional flags, and environment variables needed for building the SRW App. The modulefile must be modified to include the absolute path to the user's HPC-Stack installation and ``ufs-srweather-app`` directories. In particular, the following section must be modified: - -.. code-block:: console - - # This path should point to your HPCstack installation directory - setenv HPCstack "/Users/username/hpc-stack/install" - - # This path should point to your SRW Application directory - setenv SRW "/Users/username/ufs-srweather-app" - -An excerpt of the ``build_macos_gnu`` contents appears below for Option 1. To use Option 2, the user will need to comment out the lines specific to Option 1 and uncomment the lines specific to Option 2 in the ``build_macos_gnu`` modulefile. Additionally, users need to verify that all file paths reflect their system's configuration and that the correct version numbers for software libraries appear in the modulefile. - -.. code-block:: console - - # Option 1 compiler paths: - setenv CC "/opt/homebrew/bin/gcc" - setenv FC "/opt/homebrew/bin/gfortran" - setenv CXX "/opt/homebrew/bin/g++" - - # Option 2 compiler paths: - #setenv CC "/usr/local/bin/gcc" - #setenv FC "/usr/local/bin/gfortran" - #setenv CXX "/usr/local/bin/g++" - -Then, users must source the Lmod setup file, just as they would on other systems, and load the modulefiles needed for building and running the SRW App: - -.. code-block:: console - - source etc/lmod-setup.sh macos - module use - module load build_macos_gnu - export LDFLAGS="-L${MPI_ROOT}/lib" - -In a csh/tcsh shell, users would run ``source etc/lmod-setup.csh macos`` in place of the first line in the code block above. - -Additionally, for Option 1 systems, set the variable ``ENABLE_QUAD_PRECISION`` to ``OFF`` in the ``$SRW/src/ufs-weather-model/FV3/atmos_cubed_sphere/CMakeLists.txt`` file. This change is optional if using Option 2 to build the SRW App. To make this change using a streamline editor (`sed`), run: - -.. code-block:: console - - sed -i .bak 's/QUAD_PRECISION\" ON)/QUAD_PRECISION\" OFF)/' $SRW/src/ufs-weather-model/FV3/atmos_cubed_sphere/CMakeLists.txt - -Proceed to building the executables using the process outlined in :numref:`Step %s `. - - -.. _Data: - -Download and Stage the Data -============================ - -The SRW App requires input files to run. These include static datasets, initial and boundary conditions files, and model configuration files. On Level 1 systems, the data required to run SRW App tests are already available. For Level 2-4 systems, the data must be added. Detailed instructions on how to add the data can be found in :numref:`Section %s `. Sections :numref:`%s ` and :numref:`%s ` contain useful background information on the input and output files used in the SRW App. - -.. _GridSpecificConfig: - -Grid Configuration -======================= - -The SRW App officially supports four different predefined grids as shown in :numref:`Table %s `. 
The out-of-the-box SRW App case uses the ``RRFS_CONUS_25km`` predefined grid option. More information on the predefined and user-generated grid options can be found in :numref:`Chapter %s ` for those who are curious. Users who plan to utilize one of the four predefined domain (grid) options may continue to :numref:`Step %s `. Users who plan to create a new domain should refer to :numref:`Section %s ` for details on how to do so. At a minimum, these users will need to add the new grid name to the ``valid_param_vals.sh`` script and add the corresponding grid-specific parameters in the ``set_predef_grid_params.sh`` script. - -.. _PredefinedGrids: - -.. table:: Predefined grids in the SRW App - - +----------------------+-------------------+--------------------------------+ - | **Grid Name** | **Grid Type** | **Quilting (write component)** | - +======================+===================+================================+ - | RRFS_CONUS_25km | ESG grid | lambert_conformal | - +----------------------+-------------------+--------------------------------+ - | RRFS_CONUS_13km | ESG grid | lambert_conformal | - +----------------------+-------------------+--------------------------------+ - | RRFS_CONUS_3km | ESG grid | lambert_conformal | - +----------------------+-------------------+--------------------------------+ - | SUBCONUS_Ind_3km | ESG grid | lambert_conformal | - +----------------------+-------------------+--------------------------------+ - - -.. _GenerateForecast: - -Generate the Forecast Experiment -================================= -Generating the forecast experiment requires three steps: - -#. :ref:`Set experiment parameters ` -#. :ref:`Set Python and other environment parameters ` -#. :ref:`Run a script to generate the experiment workflow ` - -The first two steps depend on the platform being used and are described here for each Level 1 platform. Users will need to adjust the instructions to reflect their machine configuration if they are working on a Level 2-4 platform. Information in :numref:`Chapter %s: Configuring the Workflow ` can help with this. - -.. _ExptConfig: - -Set Experiment Parameters ----------------------------- - -Each experiment requires certain basic information to run (e.g., date, grid, physics suite). This information is specified in ``config_defaults.sh`` and in the user-specified ``config.sh`` file. When generating a new experiment, the SRW App first reads and assigns default values from the ``config_defaults.sh`` file. Then, it reads and (re)assigns variables from the user's custom ``config.sh`` file. - -For background info on ``config_defaults.sh``, read :numref:`Section %s `, or jump to :numref:`Section %s ` to continue configuring the experiment. - -.. _DefaultConfigSection: - -Default configuration: ``config_defaults.sh`` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. note:: - This section provides background information on how the SRW App uses the ``config_defaults.sh`` file. It is informative, but users do not need to modify ``config_defaults.sh`` to run the out-of-the-box case for the SRW App. Therefore, users may skip to :numref:`Step %s ` to continue configuring their experiment. - -Configuration variables in the ``config_defaults.sh`` file appear in :numref:`Table %s `. Some of these default values are intentionally invalid in order to ensure that the user assigns valid values in the user-specified ``config.sh`` file. Any settings provided in ``config.sh`` will override the ``config_defaults.sh`` -settings. 
There is usually no need for a user to modify the default configuration file. Additional information on the default settings can be found in the file itself and in :numref:`Chapter %s `. - -.. _ConfigVarsDefault: - -.. table:: Configuration variables specified in the config_defaults.sh script. - - +----------------------+--------------------------------------------------------------+ - | **Group Name** | **Configuration variables** | - +======================+==============================================================+ - | Experiment mode | RUN_ENVIR | - +----------------------+--------------------------------------------------------------+ - | Machine and queue | MACHINE, MACHINE_FILE, ACCOUNT, COMPILER, SCHED, | - | | LMOD_PATH, NCORES_PER_NODE, BUILD_MOD_FN, WFLOW_MOD_FN, | - | | PARTITION_DEFAULT, CLUSTERS_DEFAULT, QUEUE_DEFAULT, | - | | PARTITION_HPSS, CLUSTERS_HPSS, QUEUE_HPSS, PARTITION_FCST, | - | | CLUSTERS_FCST, QUEUE_FCST | - +----------------------+--------------------------------------------------------------+ - | Workflow management | WORKFLOW_MANAGER, RUN_CMD_UTILS, RUN_CMD_FCST, RUN_CMD_POST | - +----------------------+--------------------------------------------------------------+ - | Cron | USE_CRON_TO_RELAUNCH, CRON_RELAUNCH_INTVL_MNTS | - +----------------------+--------------------------------------------------------------+ - | Directory parameters | EXPT_BASEDIR, EXPT_SUBDIR, EXEC_SUBDIR | - +----------------------+--------------------------------------------------------------+ - | NCO mode | COMINgfs, FIXLAM_NCO_BASEDIR, STMP, NET, envir, RUN, PTMP | - +----------------------+--------------------------------------------------------------+ - | Separator | DOT_OR_USCORE | - +----------------------+--------------------------------------------------------------+ - | File name | EXPT_CONFIG_FN, RGNL_GRID_NML_FN, DATA_TABLE_FN, | - | | DIAG_TABLE_FN, FIELD_TABLE_FN, FV3_NML_BASE_SUITE_FN, | - | | FV3_NML_YAML_CONFIG_FN, FV3_NML_BASE_ENS_FN, | - | | MODEL_CONFIG_FN, NEMS_CONFIG_FN, FV3_EXEC_FN, | - | | FCST_MODEL, WFLOW_XML_FN, GLOBAL_VAR_DEFNS_FN, | - | | EXTRN_MDL_ICS_VAR_DEFNS_FN, EXTRN_MDL_LBCS_VAR_DEFNS_FN, | - | | WFLOW_LAUNCH_SCRIPT_FN, WFLOW_LAUNCH_LOG_FN | - +----------------------+--------------------------------------------------------------+ - | Forecast | DATE_FIRST_CYCL, DATE_LAST_CYCL, INCR_CYCL_FREQ, | - | | FCST_LEN_HRS | - +----------------------+--------------------------------------------------------------+ - | IC/LBC | EXTRN_MDL_NAME_ICS, EXTRN_MDL_NAME_LBCS, | - | | LBC_SPEC_INTVL_HRS, EXTRN_MDL_ICS_OFFSET_HRS, | - | | EXTRN_MDL_LBCS_OFFSET_HRS, FV3GFS_FILE_FMT_ICS, | - | | FV3GFS_FILE_FMT_LBCS | - +----------------------+--------------------------------------------------------------+ - | NOMADS | NOMADS, NOMADS_file_type | - +----------------------+--------------------------------------------------------------+ - | External model | EXTRN_MDL_SYSBASEDIR_ICS, EXTRN_MDL_SYSBASEDIR_LBCS, | - | | USE_USER_STAGED_EXTRN_FILES, EXTRN_MDL_SOURCE_BASEDIR_ICS, | - | | EXTRN_MDL_FILES_ICS, EXTRN_MDL_SOURCE_BASEDIR_LBCS, | - | | EXTRN_MDL_FILES_LBCS | - +----------------------+--------------------------------------------------------------+ - | CCPP | CCPP_PHYS_SUITE | - +----------------------+--------------------------------------------------------------+ - | Stochastic physics | NEW_LSCALE, DO_SHUM, DO_SPPT, DO_SKEB, DO_SPP, DO_LSM_SPP, | - | | ISEED_SHUM, SHUM_MAG, SHUM_LSCALE, SHUM_TSCALE, SHUM_INT, | - | | ISEED_SPPT, SPPT_MAG, SPPT_LOGIT, SPPT_LSCALE, 
SPPT_TSCALE, | - | | SPPT_INT, SPPT_SFCLIMIT, USE_ZMTNBLCK, ISEED_SKEB, | - | | SKEB_MAG, SKEB_LSCALE, SKEP_TSCALE, SKEB_INT, SKEBNORM, | - | | SKEB_VDOF, ISEED_SPP, SPP_MAG_LIST, SPP_LSCALE, SPP_TSCALE, | - | | SPP_SIGTOP1, SPP_SIGTOP2, SPP_STDDEV_CUTOFF, SPP_VAR_LIST, | - | | LSM_SPP_TSCALE, LSM_SPP_LSCALE, ISEED_LSM_SPP, | - | | LSM_SPP_VAR_LIST, LSM_SPP_MAG_LIST, LSM_SPP_EACH_STEP | - +----------------------+--------------------------------------------------------------+ - | GRID | GRID_GEN_METHOD, PREDEF_GRID_NAME | - +----------------------+--------------------------------------------------------------+ - | ESG grid | ESGgrid_LON_CTR, ESGgrid_LAT_CTR, ESGgrid_DELX, | - | | ESGgrid_DELY, ESGgrid_NX, ESGgrid_NY, ESGgrid_PAZI | - | | ESGgrid_WIDE_HALO_WIDTH | - +----------------------+--------------------------------------------------------------+ - | GFDL grid | GFDLgrid_LON_T6_CTR, GFDLgrid_LAT_T6_CTR, GFDLgrid_RES, | - | | GFDLgrid_STRETCH_FAC, GFDLgrid_REFINE_RATIO, | - | | GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G, | - | | GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G, | - | | GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G, | - | | GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G, | - | | GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES | - +----------------------+--------------------------------------------------------------+ - | Input configuration | DT_ATMOS, RESTART_INTERVAL, WRITE_DOPOST, LAYOUT_X, | - | | LAYOUT_Y, BLOCKSIZE, QUILTING, | - | | PRINT_ESMF, WRTCMP_write_groups, | - | | WRTCMP_write_tasks_per_group, WRTCMP_output_grid, | - | | WRTCMP_cen_lon, WRTCMP_cen_lat, WRTCMP_lon_lwr_left, | - | | WRTCMP_lat_lwr_left, WRTCMP_lon_upr_rght, | - | | WRTCMP_lat_upr_rght, WRTCMP_dlon, WRTCMP_dlat, | - | | WRTCMP_stdlat1, WRTCMP_stdlat2, WRTCMP_nx, WRTCMP_ny, | - | | WRTCMP_dx, WRTCMP_dy | - +----------------------+--------------------------------------------------------------+ - | Experiment generation| PREEXISTING_DIR_METHOD, VERBOSE, DEBUG | - +----------------------+--------------------------------------------------------------+ - | Cycle-independent | RUN_TASK_MAKE_GRID, GRID_DIR, RUN_TASK_MAKE_OROG, | - | | OROG_DIR, RUN_TASK_MAKE_SFC_CLIMO, SFC_CLIMO_DIR | - +----------------------+--------------------------------------------------------------+ - | Cycle dependent | RUN_TASK_GET_EXTRN_ICS, RUN_TASK_GET_EXTRN_LBCS, | - | | RUN_TASK_MAKE_ICS, RUN_TASK_MAKE_LBCS, RUN_TASK_RUN_FCST, | - | | RUN_TASK_RUN_POST | - +----------------------+--------------------------------------------------------------+ - | VX run tasks | RUN_TASK_GET_OBS_CCPA, RUN_TASK_GET_OBS_MRMS, | - | | RUN_TASK_GET_OBS_NDAS, RUN_TASK_VX_GRIDSTAT, | - | | RUN_TASK_VX_POINTSTAT, RUN_TASK_VX_ENSGRID, | - | | RUN_TASK_VX_ENSPOINT | - +----------------------+--------------------------------------------------------------+ - | Fixed File Parameters| FIXgsm, FIXaer, FIXlut, TOPO_DIR, SFC_CLIMO_INPUT_DIR, | - | | FNGLAC, FNMXIC, FNTSFC, FNSNOC, FNZORC, | - | | FNAISC, FNSMCC, FNMSKH, FIXgsm_FILES_TO_COPY_TO_FIXam, | - | | FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING, | - | | FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING, | - | | CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING | - +----------------------+--------------------------------------------------------------+ - | Workflow tasks | MAKE_GRID_TN, MAKE_OROG_TN, MAKE_SFC_CLIMO_TN, | - | | GET_EXTRN_ICS_TN, GET_EXTRN_LBCS_TN, MAKE_ICS_TN, | - | | MAKE_LBCS_TN, RUN_FCST_TN, RUN_POST_TN | - +----------------------+--------------------------------------------------------------+ - | Verification tasks | GET_OBS, GET_OBS_CCPA_TN, GET_OBS_MRMS_TN, 
GET_OBS_NDAS_TN, | - | | VX_TN, VX_GRIDSTAT_TN, VX_GRIDSTAT_REFC_TN, | - | | VX_GRIDSTAT_RETOP_TN, VX_GRIDSTAT_##h_TN, VX_POINTSTAT_TN, | - | | VX_ENSGRID_TN, VX_ENSGRID_##h_TN, VX_ENSGRID_REFC_TN, | - | | VX_ENSGRID_RETOP_TN, VX_ENSGRID_MEAN_TN, VX_ENSGRID_PROB_TN, | - | | VX_ENSGRID_MEAN_##h_TN, VX_ENSGRID_PROB_03h_TN, | - | | VX_ENSGRID_PROB_REFC_TN, VX_ENSGRID_PROB_RETOP_TN, | - | | VX_ENSPOINT_TN, VX_ENSPOINT_MEAN_TN, VX_ENSPOINT_PROB_TN | - +----------------------+--------------------------------------------------------------+ - | NODE | NNODES_MAKE_GRID, NNODES_MAKE_OROG, NNODES_MAKE_SFC_CLIMO, | - | | NNODES_GET_EXTRN_ICS, NNODES_GET_EXTRN_LBCS, | - | | NNODES_MAKE_ICS, NNODES_MAKE_LBCS, NNODES_RUN_FCST, | - | | NNODES_RUN_POST, NNODES_GET_OBS_CCPA, NNODES_GET_OBS_MRMS, | - | | NNODES_GET_OBS_NDAS, NNODES_VX_GRIDSTAT, | - | | NNODES_VX_POINTSTAT, NNODES_VX_ENSGRID, | - | | NNODES_VX_ENSGRID_MEAN, NNODES_VX_ENSGRID_PROB, | - | | NNODES_VX_ENSPOINT, NNODES_VX_ENSPOINT_MEAN, | - | | NNODES_VX_ENSPOINT_PROB | - +----------------------+--------------------------------------------------------------+ - | MPI processes | PPN_MAKE_GRID, PPN_MAKE_OROG, PPN_MAKE_SFC_CLIMO, | - | | PPN_GET_EXTRN_ICS, PPN_GET_EXTRN_LBCS, PPN_MAKE_ICS, | - | | PPN_MAKE_LBCS, PPN_RUN_FCST, PPN_RUN_POST, | - | | PPN_GET_OBS_CCPA, PPN_GET_OBS_MRMS, PPN_GET_OBS_NDAS, | - | | PPN_VX_GRIDSTAT, PPN_VX_POINTSTAT, PPN_VX_ENSGRID, | - | | PPN_VX_ENSGRID_MEAN, PPN_VX_ENSGRID_PROB, PPN_VX_ENSPOINT, | - | | PPN_VX_ENSPOINT_MEAN, PPN_VX_ENSPOINT_PROB | - +----------------------+--------------------------------------------------------------+ - | Walltime | WTIME_MAKE_GRID, WTIME_MAKE_OROG, WTIME_MAKE_SFC_CLIMO, | - | | WTIME_GET_EXTRN_ICS, WTIME_GET_EXTRN_LBCS, WTIME_MAKE_ICS, | - | | WTIME_MAKE_LBCS, WTIME_RUN_FCST, WTIME_RUN_POST, | - | | WTIME_GET_OBS_CCPA, WTIME_GET_OBS_MRMS, WTIME_GET_OBS_NDAS, | - | | WTIME_VX_GRIDSTAT, WTIME_VX_POINTSTAT, WTIME_VX_ENSGRID, | - | | WTIME_VX_ENSGRID_MEAN, WTIME_VX_ENSGRID_PROB, | - | | WTIME_VX_ENSPOINT, WTIME_VX_ENSPOINT_MEAN, | - | | WTIME_VX_ENSPOINT_PROB | - +----------------------+--------------------------------------------------------------+ - | Maximum attempt | MAXTRIES_MAKE_GRID, MAXTRIES_MAKE_OROG, | - | | MAXTRIES_MAKE_SFC_CLIMO, MAXTRIES_GET_EXTRN_ICS, | - | | MAXTRIES_GET_EXTRN_LBCS, MAXTRIES_MAKE_ICS, | - | | MAXTRIES_MAKE_LBCS, MAXTRIES_RUN_FCST, MAXTRIES_RUN_POST, | - | | MAXTRIES_GET_OBS_CCPA, MAXTRIES_GET_OBS_MRMS, | - | | MAXTRIES_GET_OBS_NDAS, MAXTRIES_VX_GRIDSTAT, | - | | MAXTRIES_VX_GRIDSTAT_REFC, MAXTRIES_VX_GRIDSTAT_RETOP, | - | | MAXTRIES_VX_GRIDSTAT_##h, MAXTRIES_VX_POINTSTAT, | - | | MAXTRIES_VX_ENSGRID, MAXTRIES_VX_ENSGRID_REFC, | - | | MAXTRIES_VX_ENSGRID_RETOP, MAXTRIES_VX_ENSGRID_##h, | - | | MAXTRIES_VX_ENSGRID_MEAN, MAXTRIES_VX_ENSGRID_PROB, | - | | MAXTRIES_VX_ENSGRID_MEAN_##h, MAXTRIES_VX_ENSGRID_PROB_##h, | - | | MAXTRIES_VX_ENSGRID_PROB_REFC, | - | | MAXTRIES_VX_ENSGRID_PROB_RETOP, MAXTRIES_VX_ENSPOINT, | - | | MAXTRIES_VX_ENSPOINT_MEAN, MAXTRIES_VX_ENSPOINT_PROB | - +----------------------+--------------------------------------------------------------+ - | Climatology | SFC_CLIMO_FIELDS, USE_MERRA_CLIMO | - +----------------------+--------------------------------------------------------------+ - | CRTM | USE_CRTM, CRTM_DIR | - +----------------------+--------------------------------------------------------------+ - | Post configuration | USE_CUSTOM_POST_CONFIG_FILE, CUSTOM_POST_CONFIG_FP, | - | | SUB_HOURLY_POST, DT_SUB_HOURLY_POST_MNTS | - 
+----------------------+--------------------------------------------------------------+ - | METplus | MODEL, MET_INSTALL_DIR, MET_BIN_EXEC, METPLUS_PATH, | - | | CCPA_OBS_DIR, MRMS_OBS_DIR, NDAS_OBS_DIR | - +----------------------+--------------------------------------------------------------+ - | Running ensembles | DO_ENSEMBLE, NUM_ENS_MEMBERS | - +----------------------+--------------------------------------------------------------+ - | Boundary blending | HALO_BLEND | - +----------------------+--------------------------------------------------------------+ - | FVCOM | USE_FVCOM, FVCOM_WCSTART, FVCOM_DIR, FVCOM_FILE | - +----------------------+--------------------------------------------------------------+ - | Thread Affinity | KMP_AFFINITY_*, OMP_NUM_THREADS_*, OMP_STACKSIZE_* | - +----------------------+--------------------------------------------------------------+ - - -.. _UserSpecificConfig: - -User-specific configuration: ``config.sh`` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The user must specify certain basic information about the experiment in a ``config.sh`` file located in the ``ufs-srweather-app/regional_workflow/ush`` directory. Two example templates are provided in that directory: ``config.community.sh`` and ``config.nco.sh``. The first file is a minimal example for creating and running an experiment in the *community* mode (with ``RUN_ENVIR`` set to ``community``). The second is an example for creating and running an experiment in the *NCO* (operational) mode (with ``RUN_ENVIR`` set to ``nco``). The *community* mode is recommended in most cases and is fully supported for this release. The operational/NCO mode is typically used by those at the NOAA/NCEP/Environmental Modeling Center (EMC) and the NOAA/Global Systems Laboratory (GSL) working on pre-implementation testing for the Rapid Refresh Forecast System (RRFS). :numref:`Table %s ` shows the configuration variables that appear in the ``config.community.sh``, along with their default values in ``config_default.sh`` and the values defined in ``config.community.sh``. - -.. _ConfigCommunity: - -.. 
table:: Configuration variables specified in the config.community.sh script - - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | **Parameter** | **Default Value** | **config.community.sh Value** | - +================================+===================+==================================================================================+ - | MACHINE | "BIG_COMPUTER" | "hera" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | ACCOUNT | "project_name" | "an_account" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | EXPT_SUBDIR | "" | "test_CONUS_25km_GFSv16" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | COMPILER | "intel" | "intel" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | VERBOSE | "TRUE" | "TRUE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | RUN_ENVIR | "nco" | "community" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | PREEXISTING_DIR_METHOD | "delete" | "rename" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | PREDEF_GRID_NAME | "" | "RRFS_CONUS_25km" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | DO_ENSEMBLE | "FALSE" | "FALSE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | NUM_ENS_MEMBERS | "1" | "2" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | QUILTING | "TRUE" | "TRUE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | CCPP_PHYS_SUITE | "FV3_GFS_v16" | "FV3_GFS_v16" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | FCST_LEN_HRS | "24" | "12" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | LBC_SPEC_INTVL_HRS | "6" | "6" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | DATE_FIRST_CYCL | "YYYYMMDDHH" | "2019061518" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | DATE_LAST_CYCL | "YYYYMMDDHH" | "2019061518" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | EXTRN_MDL_NAME_ICS | "FV3GFS" | "FV3GFS" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | EXTRN_MDL_NAME_LBCS | 
"FV3GFS" | "FV3GFS" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | FV3GFS_FILE_FMT_ICS | "nemsio" | "grib2" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | FV3GFS_FILE_FMT_LBCS | "nemsio" | "grib2" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | WTIME_RUN_FCST | "04:30:00" | "02:00:00" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | USE_USER_STAGED_EXTRN_FILES | "FALSE" | "TRUE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | EXTRN_MDL_SOURCE_BASEDIR_ICS | "" | "/scratch2/BMC/det/UFS_SRW_App/develop/input_model_data/FV3GFS/grib2/2019061518" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | EXTRN_MDL_FILES_ICS | "" | "gfs.pgrb2.0p25.f000" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | EXTRN_MDL_SOURCE_BASEDIR_LBCS | "" | "/scratch2/BMC/det/UFS_SRW_App/develop/input_model_data/FV3GFS/grib2/2019061518" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | EXTRN_MDL_FILES_LBCS | "" | "gfs.pgrb2.0p25.f006" "gfs.pgrb2.0p25.f012" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | MODEL | "" | FV3_GFS_v16_CONUS_25km" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | METPLUS_PATH | "" | "/path/to/METPlus" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | MET_INSTALL_DIR | "" | "/path/to/MET" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | CCPA_OBS_DIR | "" | "/path/to/processed/CCPA/data" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | MRMS_OBS_DIR | "" | "/path/to/processed/MRMS/data" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | NDAS_OBS_DIR | "" | "/path/to/processed/NDAS/data" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | RUN_TASK_MAKE_GRID | "TRUE" | "TRUE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | RUN_TASK_MAKE_OROG | "TRUE" | "TRUE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | RUN_TASK_MAKE_SFC_CLIMO | "TRUE" | "TRUE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ 
- | RUN_TASK_GET_OBS_CCPA | "FALSE" | "FALSE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | RUN_TASK_GET_OBS_MRMS | "FALSE" | "FALSE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | RUN_TASK_GET_OBS_NDAS | "FALSE" | "FALSE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | RUN_TASK_VX_GRIDSTAT | "FALSE" | "FALSE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | RUN_TASK_VX_POINTSTAT | "FALSE" | "FALSE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | RUN_TASK_VX_ENSGRID | "FALSE" | "FALSE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - | RUN_TASK_VX_ENSPOINT | "FALSE" | "FALSE" | - +--------------------------------+-------------------+----------------------------------------------------------------------------------+ - - -To get started, make a copy of ``config.community.sh``. From the ``ufs-srweather-app`` directory, run: - -.. code-block:: console - - cd $SRW/regional_workflow/ush - cp config.community.sh config.sh - -The default settings in this file include a predefined 25-km :term:`CONUS` grid (RRFS_CONUS_25km), the :term:`GFS` v16 physics suite (FV3_GFS_v16 :term:`CCPP`), and :term:`FV3`-based GFS raw external model data for initialization. - -Next, edit the new ``config.sh`` file to customize it for your machine. At a minimum, change the ``MACHINE`` and ``ACCOUNT`` variables; then choose a name for the experiment directory by setting ``EXPT_SUBDIR``. If you have pre-staged initialization data for the experiment, set ``USE_USER_STAGED_EXTRN_FILES="TRUE"``, and set the paths to the data for ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS``. If the modulefile used to set up the build environment in :numref:`Section %s ` uses a GNU compiler, check that the line ``COMPILER="gnu"`` appears in the ``config.sh`` file. On platforms where Rocoto and :term:`cron` are available, users can automate resubmission of their experiment workflow by adding the following lines to the ``config.sh`` file: - -.. code-block:: console - - USE_CRON_TO_RELAUNCH="TRUE" - CRON_RELAUNCH_INTVL_MNTS="03" - -.. note:: - - Generic Linux and MacOS users should refer to :numref:`Section %s ` for additional details on configuring an experiment and python environment. - -Sample ``config.sh`` settings are indicated below for Level 1 platforms. Detailed guidance applicable to all systems can be found in :numref:`Chapter %s: Configuring the Workflow `, which discusses each variable and the options available. Additionally, information about the four predefined Limited Area Model (LAM) Grid options can be found in :numref:`Chapter %s: Limited Area Model (LAM) Grids `. - -.. hint:: - - To determine an appropriate ACCOUNT field for Level 1 systems, run ``groups``, and it will return a list of projects you have permissions for. Not all of the listed projects/groups have an HPC allocation, but those that do are potentially valid account names. 
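-
-For example, on a Level 1 system the check might look like the following (the group names shown are placeholders only, not real allocations):
-
-.. code-block:: console
-
-   groups
-   # example output (placeholders): users an_account another_project
-
-Any listed group that corresponds to an HPC allocation is a candidate value for the ``ACCOUNT`` variable in ``config.sh``.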
- -Minimum parameter settings for running the out-of-the-box SRW App case on Level 1 machines: - -.. _SystemData: - -**Cheyenne:** - -.. code-block:: console - - MACHINE="cheyenne" - ACCOUNT="" - EXPT_SUBDIR="" - USE_USER_STAGED_EXTRN_FILES="TRUE" - EXTRN_MDL_SOURCE_BASEDIR_ICS="/glade/p/ral/jntp/UFS_SRW_App/develop/input_model_data///" - EXTRN_MDL_SOURCE_BASEDIR_LBCS="/glade/p/ral/jntp/UFS_SRW_App/develop/input_model_data///" - -where: - * ```` refers to a valid account name. - * ```` is an experiment name of the user's choice. - * ```` refers to a subdirectory, such as "FV3GFS" or "HRRR", containing the experiment data. - * ```` refers to one of 3 possible data formats: ``grib2``, ``nemsio``, or ``netcdf``. - * ```` refers to a subdirectory containing data for the :term:`cycle` date (in YYYYMMDDHH format). - - -**Hera, Jet, Orion, Gaea:** - -The ``MACHINE``, ``ACCOUNT``, and ``EXPT_SUBDIR`` settings are the same as for Cheyenne, except that ``"cheyenne"`` should be switched to ``"hera"``, ``"jet"``, ``"orion"``, or ``"gaea"``, respectively. Set ``USE_USER_STAGED_EXTRN_FILES="TRUE"``, but replace the file paths to Cheyenne's data with the file paths for the correct machine. ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS`` use the same base file path. - -On Hera: - -.. code-block:: console - - "/scratch2/BMC/det/UFS_SRW_App/develop/input_model_data////" - -On Jet: - -.. code-block:: console - - "/mnt/lfs4/BMC/wrfruc/UFS_SRW_App/develop/input_model_data////" - -On Orion: - -.. code-block:: console - - "/work/noaa/fv3-cam/UFS_SRW_App/develop/input_model_data////" - -On Gaea: - -.. code-block:: console - - "/lustre/f2/pdata/ncep/UFS_SRW_App/develop/input_model_data////" - -On **WCOSS** systems, edit ``config.sh`` with these WCOSS-specific parameters, and use a valid WCOSS project code for the account parameter: - -.. code-block:: console - - MACHINE="wcoss2" - ACCOUNT="valid_wcoss_project_code" - EXPT_SUBDIR="my_expt_name" - USE_USER_STAGED_EXTRN_FILES="TRUE" - -On WCOSS2: - -.. code-block:: console - - EXTRN_MDL_SOURCE_BASEDIR_ICS="/lfs/h2/emc/lam/noscrub/UFS_SRW_App/develop/input_model_data///YYYYMMDDHH/ICS" - EXTRN_MDL_SOURCE_BASEDIR_LBCS="/lfs/h2/emc/lam/noscrub/UFS_SRW_App/develop/input_model_data///YYYYMMDDHH/LBCS" - -On NOAA Cloud Systems: - -.. code-block:: console - - MACHINE="NOAACLOUD" - ACCOUNT="none" - EXPT_SUBDIR="" - USE_USER_STAGED_EXTRN_FILES="TRUE" - EXTRN_MDL_SOURCE_BASEDIR_ICS="/contrib/EPIC/UFS_SRW_App/develop/input_model_data////" - EXTRN_MDL_FILES_ICS=( "gfs.t18z.pgrb2.0p25.f000" ) - EXTRN_MDL_SOURCE_BASEDIR_LBCS="/contrib/EPIC/UFS_SRW_App/develop/input_model_data////" - EXTRN_MDL_FILES_LBCS=( "gfs.t18z.pgrb2.0p25.f006" "gfs.t18z.pgrb2.0p25.f012" ) - -.. note:: - - The values of the configuration variables should be consistent with those in the - ``valid_param_vals.sh`` script. In addition, various sample configuration files can be found in the ``regional_workflow/tests/baseline_configs`` directory. - - -To configure an experiment and python environment for a general Linux or Mac system, see the :ref:`next section `. To configure an experiment to run METplus verification tasks, see :numref:`Section %s `. Otherwise, skip to :numref:`Section %s `. - -.. _LinuxMacEnvConfig: - -User-specific Configuration on a General Linux/MacOS System -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The configuration process for Linux and MacOS systems is similar to the process for other systems, but it requires a few extra steps. - -.. 
note::
-   Examples in this subsection presume that the user is running Terminal.app with a bash shell environment. If this is not the case, users will need to adjust the commands to fit their command line application and shell environment.
-
-.. _MacMorePackages:
-
-Install/Upgrade Mac-Specific Packages
-````````````````````````````````````````
-MacOS requires the installation of a few additional packages and, possibly, an upgrade to bash. Users running on MacOS should execute the following commands:
-
-.. code-block:: console
-
-   bash --version
-   brew upgrade bash
-   brew install coreutils
-   brew install gnu-sed
-
-.. _LinuxMacVEnv:
-
-Creating a Virtual Environment on Linux and Mac
-``````````````````````````````````````````````````
-
-Users should ensure that the following packages are installed and up-to-date:
-
-.. code-block:: console
-
-   python3 -m pip --version
-   python3 -m pip install --upgrade pip
-   python3 -m ensurepip --default-pip
-   python3 -m pip install ruby     # on MacOS only, "brew install ruby" may be used instead
-
-Users must create a virtual environment (``regional_workflow``), store it in their ``$HOME/venv/`` directory, and install additional Python packages:
-
-.. code-block:: console
-
-   [[ -d $HOME/venv ]] || mkdir -p $HOME/venv
-   python3 -m venv $HOME/venv/regional_workflow
-   source $HOME/venv/regional_workflow/bin/activate
-   python3 -m pip install jinja2
-   python3 -m pip install pyyaml
-   python3 -m pip install f90nml
-
-The virtual environment can be deactivated by running the ``deactivate`` command. The virtual environment built here will be reactivated in :numref:`Step %s ` and needs to be used to generate the workflow and run the experiment.
-
-.. _LinuxMacExptConfig:
-
-Configuring an Experiment on General Linux and MacOS Systems
-``````````````````````````````````````````````````````````````
-
-**Optional: Install Rocoto**
-
-.. note::
-   Users may `install Rocoto `__ if they want to make use of a workflow manager to run their experiments. However, this option has not been tested yet on MacOS and has had limited testing on general Linux platforms.
-
-
-**Configure the SRW App:**
-
-Configure an experiment using a template. Copy the contents of ``config.community.sh`` into ``config.sh``:
-
-.. code-block:: console
-
-   cd $SRW/regional_workflow/ush
-   cp config.community.sh config.sh
-
-In the ``config.sh`` file, set ``MACHINE="macos"`` or ``MACHINE="linux"``, and modify the account and experiment info. For example:
-
-.. code-block:: console
-
-   MACHINE="macos"
-   ACCOUNT="user"
-   EXPT_SUBDIR=""
-   COMPILER="gnu"
-   VERBOSE="TRUE"
-   RUN_ENVIR="community"
-   PREEXISTING_DIR_METHOD="rename"
-
-   PREDEF_GRID_NAME="RRFS_CONUS_25km"
-   QUILTING="TRUE"
-
-Due to the limited number of processors on MacOS systems, users must also configure the domain decomposition defaults (usually, there are only 8 CPUs on M1-family chips and 4 CPUs on x86_64 systems).
-
-For :ref:`Option 1 `, add the following information to ``config.sh``:
-
-.. code-block:: console
-
-   LAYOUT_X="${LAYOUT_X:-3}"
-   LAYOUT_Y="${LAYOUT_Y:-2}"
-   WRTCMP_write_groups="1"
-   WRTCMP_write_tasks_per_group="2"
-
-For :ref:`Option 2 `, add the following information to ``config.sh``:
-
-.. code-block:: console
-
-   LAYOUT_X="${LAYOUT_X:-3}"
-   LAYOUT_Y="${LAYOUT_Y:-1}"
-   WRTCMP_write_groups="1"
-   WRTCMP_write_tasks_per_group="1"
-
-.. note::
-   The number of MPI processes required by the forecast will be equal to ``LAYOUT_X`` * ``LAYOUT_Y`` + ``WRTCMP_write_tasks_per_group``.
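-
-As a quick check of the formula in the note above, the two options shown translate to the following totals (derived from the settings above, not additional requirements):
-
-.. code-block:: console
-
-   # Option 1: 3 * 2 + 2 = 8 MPI processes (suits an 8-CPU system)
-   # Option 2: 3 * 1 + 1 = 4 MPI processes (suits a 4-CPU system)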
- -**Configure the Machine File** - -Configure a ``macos.sh`` or ``linux.sh`` machine file in ``$SRW/regional_workflow/ush/machine/`` based on the number of CPUs (````) in the system (usually 8 or 4 in MacOS; varies on Linux systems). Job scheduler (``SCHED``) options can be viewed :ref:`here `. Users must also set the path to the fix file directories. - -.. code-block:: console - - # Commands to run at the start of each workflow task. - PRE_TASK_CMDS='{ ulimit -a; }' - - # Architecture information - WORKFLOW_MANAGER="none" - NCORES_PER_NODE=${NCORES_PER_NODE:-} - SCHED=${SCHED:-""} - - # UFS SRW App specific paths - FIXgsm="path/to/FIXgsm/files" - FIXaer="path/to/FIXaer/files" - FIXlut="path/to/FIXlut/files" - TOPO_DIR="path/to/FIXgsm/files" # (path to location of static input files used by the - make_orog task) - SFC_CLIMO_INPUT_DIR="path/to/FIXgsm/files" # (path to location of static surface climatology - input fields used by sfc_climo_gen) - - # Run commands for executables - RUN_CMD_SERIAL="time" - RUN_CMD_UTILS="mpirun -np 4" - RUN_CMD_FCST='mpirun -np ${PE_MEMBER01}' - RUN_CMD_POST="mpirun -np 4" - - -.. _VXConfig: - -Configure METplus Verification Suite (Optional) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Users who want to use the METplus verification suite to evaluate their forecasts need to add additional information to their ``config.sh`` file. Other users may skip to the :ref:`next section `. - -.. attention:: - METplus *installation* is not included as part of the build process for this release of the SRW App. However, METplus is preinstalled on many `Level 1 & 2 `__ systems. For the v2.0.0 release, METplus *use* is supported on systems with a functioning METplus installation, although installation itself is not supported. For more information about METplus, see :numref:`Section %s `. - -.. note:: - If METplus users update their METplus installation, they must update the module load statements in ``ufs-srweather-app/regional_workflow/modulefiles/tasks//run_vx.local`` file to correspond to their system's updated installation: - - .. code-block:: console - - module use -a - module load met/ - -To use METplus verification, the path to the MET and METplus directories must be added to ``config.sh``: - -.. code-block:: console - - METPLUS_PATH="" - MET_INSTALL_DIR="" - -Users who have already staged the observation data needed for METplus (i.e., the :term:`CCPA`, :term:`MRMS`, and :term:`NDAS` data) on their system should set the path to this data and set the corresponding ``RUN_TASK_GET_OBS_*`` parameters to "FALSE" in ``config.sh``. - -.. code-block:: console - - CCPA_OBS_DIR="/path/to/UFS_SRW_App/develop/obs_data/ccpa/proc" - MRMS_OBS_DIR="/path/to/UFS_SRW_App/develop/obs_data/mrms/proc" - NDAS_OBS_DIR="/path/to/UFS_SRW_App/develop/obs_data/ndas/proc" - RUN_TASK_GET_OBS_CCPA="FALSE" - RUN_TASK_GET_OBS_MRMS="FALSE" - RUN_TASK_GET_OBS_NDAS="FALSE" - -If users have access to NOAA :term:`HPSS` but have not pre-staged the data, they can simply set the ``RUN_TASK_GET_OBS_*`` tasks to "TRUE", and the machine will attempt to download the appropriate data from NOAA HPSS. The ``*_OBS_DIR`` paths must be set to the location where users want the downloaded data to reside. - -Users who do not have access to NOAA HPSS and do not have the data on their system will need to download :term:`CCPA`, :term:`MRMS`, and :term:`NDAS` data manually from collections of publicly available data, such as the ones listed `here `__. 
- -Next, the verification tasks must be turned on according to the user's needs. Users should add some or all of the following tasks to ``config.sh``, depending on the verification procedure(s) they have in mind: - -.. code-block:: console - - RUN_TASK_VX_GRIDSTAT="TRUE" - RUN_TASK_VX_POINTSTAT="TRUE" - RUN_TASK_VX_ENSGRID="TRUE" - RUN_TASK_VX_ENSPOINT="TRUE" - -These tasks are independent, so users may set some values to "TRUE" and others to "FALSE" depending on the needs of their experiment. Note that the ENSGRID and ENSPOINT tasks apply only to ensemble model verification. Additional verification tasks appear in :numref:`Table %s `. More details on all of the parameters in this section are available in :numref:`Section %s `. - -.. _SetUpPythonEnv: - -Set Up the Python and Other Environment Parameters ----------------------------------------------------- - -The workflow requires Python 3 with the packages ``PyYAML``, ``Jinja2``, and ``f90nml`` available. This Python environment has already been set up on Level 1 platforms, and it can be activated in the following way: - -.. code-block:: console - - module use - module load wflow_ - -The ``wflow_`` modulefile will then output instructions to activate the regional workflow. The user should run the commands specified in the modulefile output. For example, if the output says: - -.. code-block:: console - - Please do the following to activate conda: - > conda activate regional_workflow - -then the user should run ``conda activate regional_workflow``. This will activate the ``regional_workflow`` conda environment. However, the command(s) will vary from system to system. Regardless, the user should see ``(regional_workflow)`` in front of the Terminal prompt at this point. If this is not the case, activate the regional workflow from the ``ush`` directory by running: - -.. code-block:: console - - conda init - source ~/.bashrc - conda activate regional_workflow - -.. _LinuxMacActivateWFenv: - -Activating the Workflow Environment on Non-Level 1 Systems -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Users on non-Level 1 systems can copy one of the provided ``wflow_`` files and use it as a template to create a ``wflow_`` file that works for their system. ``wflow_macos`` and ``wflow_linux`` template files are provided with the release. After making appropriate modifications to a ``wflow_`` file, users can run the commands from :numref:`Step %s ` above to activate the regional workflow. - -On generic Linux or MacOS systems, loading the designated ``wflow_`` file will output instructions similar to the following: - -.. code-block:: console - - Please do the following to activate conda: - > source $VENV/bin/activate - -If that does not work, users can also try: - -.. code-block:: console - - source $HOME/venv/regional_workflow/bin/activate - -However, it may instead be necessary to make additional adjustments to the ``wflow_`` file. - -.. _GenerateWorkflow: - -Generate the Regional Workflow -------------------------------------------- - -Run the following command from the ``ufs-srweather-app/regional_workflow/ush`` directory to generate the workflow: - -.. code-block:: console - - ./generate_FV3LAM_wflow.sh - -The last line of output from this script, starting with ``*/1 * * * *`` or ``*/3 * * * *``, can be saved and :ref:`used later ` to automatically run portions of the workflow if users have the Rocoto workflow manager installed on their system. 
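-
-If that line has already scrolled out of view, one illustrative way to recover it is to search the generation log (created in the experiment directory, as described below) for the cron-style entry:
-
-.. code-block:: console
-
-   grep "called_from_cron" $EXPTDIR/log.generate_FV3LAM_wflow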
- -This workflow generation script creates an experiment directory and populates it with all the data needed to run through the workflow. The flowchart in :numref:`Figure %s ` describes the experiment generation process. First, ``generate_FV3LAM_wflow.sh`` runs the ``setup.sh`` script to set the configuration parameters. Second, it copies the time-independent (fix) files and other necessary data input files from their location in the ufs-weather-model directory to the experiment directory (``$EXPTDIR``). Third, it copies the weather model executable (``ufs_model``) from the ``bin`` directory to ``$EXPTDIR`` and creates the input namelist file ``input.nml`` based on the ``input.nml.FV3`` file in the regional_workflow/ush/templates directory. Lastly, it creates the workflow XML file ``FV3LAM_wflow.xml`` that is executed when running the experiment with the Rocoto workflow manager. - -The ``setup.sh`` script reads three other configuration scripts in order: (1) ``config_default.sh`` (:numref:`Section %s `), (2) ``config.sh`` (:numref:`Section %s `), and (3) ``set_predef_grid_params.sh``. If a parameter is specified differently in these scripts, the file containing the last defined value will be used. - -The generated workflow will appear in ``$EXPTDIR``, where ``EXPTDIR=${EXPT_BASEDIR}/${EXPT_SUBDIR}``. These variables were specified in the ``config.sh`` file in :numref:`Step %s `. The settings for these paths can also be viewed in the console output from the ``./generate_FV3LAM_wflow.sh`` script or in the ``log.generate_FV3LAM_wflow`` file, which can be found in ``$EXPTDIR``. - -.. _WorkflowGeneration: - -.. figure:: _static/FV3regional_workflow_gen_v2.png - :alt: Flowchart of the workflow generation process. Scripts are called in the following order: source_util_funcs.sh (which calls bash_utils), then set_FV3nml_sfc_climo_filenames.sh, set_FV3nml_stock_params.sh, create_diag_table_files.sh, and setup.sh. setup.sh calls several scripts: set_cycle_dates.sh, set_grid_params_GFDLgrid.sh, set_grid_params_ESGgrid.sh, link_fix.sh, set_ozone_param.sh, set_Thompson_mp_fix_files.sh, config_defaults.sh, config.sh, and valid_param_vals.sh. Then, it sets a number of variables, including FIXgsm, TOPO_DIR, and SFC_CLIMO_INPUT_DIR variables. Next, set_predef_grid_params.sh is called, and the FIXam and FIXLAM directories are set, along with the forecast input files. The setup script also calls set_extrn_mdl_params.sh, sets the GRID_GEN_METHOD with HALO, checks various parameters, and generates shell scripts. Then, the workflow generation script sets up YAML-compliant strings and generates the actual Rocoto workflow XML file from the template file (fill_jinja_template.py). The workflow generation script checks the crontab file and, if applicable, copies certain fix files to the experiment directory. Then, it copies templates of various input files to the experiment directory and sets parameters for the input.nml file. Finally, it generates the workflow. Additional information on each step appears in comments within each script. - - *Experiment generation description* - - -.. _WorkflowTaskDescription: - -Description of Workflow Tasks --------------------------------- - -.. note:: - This section gives a general overview of workflow tasks. To begin running the workflow, skip to :numref:`Step %s ` - -:numref:`Figure %s ` illustrates the overall workflow. Individual tasks that make up the workflow are specified in the ``FV3LAM_wflow.xml`` file. :numref:`Table %s ` describes the function of each baseline task. 
The first three pre-processing tasks; ``MAKE_GRID``, ``MAKE_OROG``, and ``MAKE_SFC_CLIMO`` are optional. If the user stages pre-generated grid, orography, and surface climatology fix files, these three tasks can be skipped by adding the following lines to the ``config.sh`` file before running the ``generate_FV3LAM_wflow.sh`` script: - -.. code-block:: console - - RUN_TASK_MAKE_GRID="FALSE" - RUN_TASK_MAKE_OROG="FALSE" - RUN_TASK_MAKE_SFC_CLIMO="FALSE" - - -.. _WorkflowTasksFig: - -.. figure:: _static/FV3LAM_wflow_flowchart_v2.png - :alt: Flowchart of the workflow tasks. If the make_grid, make_orog, and make_sfc_climo tasks are toggled off, they will not be run. If toggled on, make_grid, make_orog, and make_sfc_climo will run consecutively by calling the corresponding exregional script in the regional_workflow/scripts directory. The get_ics, get_lbcs, make_ics, make_lbcs, and run_fcst tasks call their respective exregional scripts. The run_post task will run, and if METplus verification tasks have been configured, those will run during post-processing by calling their exregional scripts. - - *Flowchart of the workflow tasks* - - -The ``FV3LAM_wflow.xml`` file runs the specific j-job scripts (``regional_workflow/jobs/JREGIONAL_[task name]``) in the prescribed order when the experiment is launched via the ``launch_FV3LAM_wflow.sh`` script or the ``rocotorun`` command. Each j-job task has its own source script (or "ex-script") named ``exregional_[task name].sh`` in the ``regional_workflow/scripts`` directory. Two database files named ``FV3LAM_wflow.db`` and ``FV3LAM_wflow_lock.db`` are generated and updated by the Rocoto calls. There is usually no need for users to modify these files. To relaunch the workflow from scratch, delete these two ``*.db`` files and then call the launch script repeatedly for each task. - - -.. _WorkflowTasksTable: - -.. table:: Baseline workflow tasks in the SRW App - - +----------------------+------------------------------------------------------------+ - | **Workflow Task** | **Task Description** | - +======================+============================================================+ - | make_grid | Pre-processing task to generate regional grid files. Only | - | | needs to be run once per experiment. | - +----------------------+------------------------------------------------------------+ - | make_orog | Pre-processing task to generate orography files. Only | - | | needs to be run once per experiment. | - +----------------------+------------------------------------------------------------+ - | make_sfc_climo | Pre-processing task to generate surface climatology files. | - | | Only needs to be run, at most, once per experiment. 
| - +----------------------+------------------------------------------------------------+ - | get_extrn_ics | Cycle-specific task to obtain external data for the | - | | initial conditions | - +----------------------+------------------------------------------------------------+ - | get_extrn_lbcs | Cycle-specific task to obtain external data for the | - | | lateral boundary conditions (LBCs) | - +----------------------+------------------------------------------------------------+ - | make_ics | Generate initial conditions from the external data | - +----------------------+------------------------------------------------------------+ - | make_lbcs | Generate LBCs from the external data | - +----------------------+------------------------------------------------------------+ - | run_fcst | Run the forecast model (UFS weather model) | - +----------------------+------------------------------------------------------------+ - | run_post | Run the post-processing tool (UPP) | - +----------------------+------------------------------------------------------------+ - -In addition to the baseline tasks described in :numref:`Table %s ` above, users may choose to run some or all of the METplus verification tasks. These tasks are described in :numref:`Table %s ` below. - -.. _VXWorkflowTasksTable: - -.. table:: Verification (VX) workflow tasks in the SRW App - - +-----------------------+------------------------------------------------------------+ - | **Workflow Task** | **Task Description** | - +=======================+============================================================+ - | GET_OBS_CCPA | Retrieves and organizes hourly :term:`CCPA` data from NOAA | - | | HPSS. Can only be run if ``RUN_TASK_GET_OBS_CCPA="TRUE"`` | - | | *and* user has access to NOAA :term:`HPSS` data. | - +-----------------------+------------------------------------------------------------+ - | GET_OBS_NDAS | Retrieves and organizes hourly :term:`NDAS` data from NOAA | - | | HPSS. Can only be run if ``RUN_TASK_GET_OBS_NDAS="TRUE"`` | - | | *and* user has access to NOAA HPSS data. | - +-----------------------+------------------------------------------------------------+ - | GET_OBS_MRMS | Retrieves and organizes hourly :term:`MRMS` composite | - | | reflectivity and :term:`echo top` data from NOAA HPSS. Can | - | | only be run if ``RUN_TASK_GET_OBS_MRMS="TRUE"`` *and* user | - | | has access to NOAA HPSS data. | - +-----------------------+------------------------------------------------------------+ - | VX_GRIDSTAT | Runs METplus grid-to-grid verification for 1-h accumulated | - | | precipitation | - +-----------------------+------------------------------------------------------------+ - | VX_GRIDSTAT_REFC | Runs METplus grid-to-grid verification for composite | - | | reflectivity | - +-----------------------+------------------------------------------------------------+ - | VX_GRIDSTAT_RETOP | Runs METplus grid-to-grid verification for :term:`echo top`| - +-----------------------+------------------------------------------------------------+ - | VX_GRIDSTAT_##h | Runs METplus grid-to-grid verification for 3-h, 6-h, and | - | | 24-h (i.e., daily) accumulated precipitation. Valid values | - | | for ``##`` are ``03``, ``06``, and ``24``. 
| - +-----------------------+------------------------------------------------------------+ - | VX_POINTSTAT | Runs METplus grid-to-point verification for surface and | - | | upper-air variables | - +-----------------------+------------------------------------------------------------+ - | VX_ENSGRID | Runs METplus grid-to-grid ensemble verification for 1-h | - | | accumulated precipitation. Can only be run if | - | | ``DO_ENSEMBLE="TRUE"`` and ``RUN_TASK_VX_ENSGRID="TRUE"``. | - +-----------------------+------------------------------------------------------------+ - | VX_ENSGRID_REFC | Runs METplus grid-to-grid ensemble verification for | - | | composite reflectivity. Can only be run if | - | | ``DO_ENSEMBLE="TRUE"`` and | - | | ``RUN_TASK_VX_ENSGRID="TRUE"``. | - +-----------------------+------------------------------------------------------------+ - | VX_ENSGRID_RETOP | Runs METplus grid-to-grid ensemble verification for | - | | :term:`echo top`. Can only be run if ``DO_ENSEMBLE="TRUE"``| - | | and ``RUN_TASK_VX_ENSGRID="TRUE"``. | - +-----------------------+------------------------------------------------------------+ - | VX_ENSGRID_##h | Runs METplus grid-to-grid ensemble verification for 3-h, | - | | 6-h, and 24-h (i.e., daily) accumulated precipitation. | - | | Valid values for ``##`` are ``03``, ``06``, and ``24``. | - | | Can only be run if ``DO_ENSEMBLE="TRUE"`` and | - | | ``RUN_TASK_VX_ENSGRID="TRUE"``. | - +-----------------------+------------------------------------------------------------+ - | VX_ENSGRID_MEAN | Runs METplus grid-to-grid verification for ensemble mean | - | | 1-h accumulated precipitation. Can only be run if | - | | ``DO_ENSEMBLE="TRUE"`` and ``RUN_TASK_VX_ENSGRID="TRUE"``. | - +-----------------------+------------------------------------------------------------+ - | VX_ENSGRID_PROB | Runs METplus grid-to-grid verification for 1-h accumulated | - | | precipitation probabilistic output. Can only be run if | - | | ``DO_ENSEMBLE="TRUE"`` and ``RUN_TASK_VX_ENSGRID="TRUE"``. | - +-----------------------+------------------------------------------------------------+ - | VX_ENSGRID_MEAN_##h | Runs METplus grid-to-grid verification for ensemble mean | - | | 3-h, 6-h, and 24h (i.e., daily) accumulated precipitation. | - | | Valid values for ``##`` are ``03``, ``06``, and ``24``. | - | | Can only be run if ``DO_ENSEMBLE="TRUE"`` and | - | | ``RUN_TASK_VX_ENSGRID="TRUE"``. | - +-----------------------+------------------------------------------------------------+ - | VX_ENSGRID_PROB_##h | Runs METplus grid-to-grid verification for 3-h, 6-h, and | - | | 24h (i.e., daily) accumulated precipitation probabilistic | - | | output. Valid values for ``##`` are ``03``, ``06``, and | - | | ``24``. Can only be run if ``DO_ENSEMBLE="TRUE"`` and | - | | ``RUN_TASK_VX_ENSGRID="TRUE"``. | - +-----------------------+------------------------------------------------------------+ - | VX_ENSGRID_PROB_REFC | Runs METplus grid-to-grid verification for ensemble | - | | probabilities for composite reflectivity. Can only be run | - | | if ``DO_ENSEMBLE="TRUE"`` and | - | | ``RUN_TASK_VX_ENSGRID="TRUE"``. | - +-----------------------+------------------------------------------------------------+ - | VX_ENSGRID_PROB_RETOP | Runs METplus grid-to-grid verification for ensemble | - | | probabilities for :term:`echo top`. Can only be run if | - | | ``DO_ENSEMBLE="TRUE"`` and ``RUN_TASK_VX_ENSGRID="TRUE"``. 
| - +-----------------------+------------------------------------------------------------+ - | VX_ENSPOINT | Runs METplus grid-to-point ensemble verification for | - | | surface and upper-air variables. Can only be run if | - | | ``DO_ENSEMBLE="TRUE"`` and ``RUN_TASK_VX_ENSPOINT="TRUE"``.| - +-----------------------+------------------------------------------------------------+ - | VX_ENSPOINT_MEAN | Runs METplus grid-to-point verification for ensemble mean | - | | surface and upper-air variables. Can only be run if | - | | ``DO_ENSEMBLE="TRUE"`` and ``RUN_TASK_VX_ENSPOINT="TRUE"``.| - +-----------------------+------------------------------------------------------------+ - | VX_ENSPOINT_PROB | Runs METplus grid-to-point verification for ensemble | - | | probabilities for surface and upper-air variables. Can | - | | only be run if ``DO_ENSEMBLE="TRUE"`` and | - | | ``RUN_TASK_VX_ENSPOINT="TRUE"``. | - +-----------------------+------------------------------------------------------------+ - - -.. _Run: - -Run the Workflow -======================= - -The workflow can be run using the Rocoto workflow manager (see :numref:`Section %s `) or using standalone wrapper scripts (see :numref:`Section %s `). - -.. attention:: - - If users are running the SRW App on a system that does not have Rocoto installed (e.g., `Level 3 & 4 `__ systems, such as MacOS or generic Linux systems), they should follow the process outlined in :numref:`Section %s ` instead of the instructions in this section. - - -.. _UseRocoto: - -Run the Workflow Using Rocoto --------------------------------- - -The information in this section assumes that Rocoto is available on the desired platform. All official HPC platforms for the UFS SRW App release make use of the Rocoto workflow management software for running experiments. However, Rocoto cannot be used when running the workflow within a container. If Rocoto is not available, it is still possible to run the workflow using stand-alone scripts according to the process outlined in :numref:`Section %s `. - -There are two main ways to run the workflow with Rocoto: (1) with the ``launch_FV3LAM_wflow.sh`` script, and (2) by manually calling the ``rocotorun`` command. Users can also automate the workflow using a crontab. - -.. note:: - Users may find it helpful to review :numref:`Chapter %s ` to gain a better understanding of Rocoto commands and workflow management before continuing, but this is not required to run the experiment. - -Optionally, an environment variable can be set to navigate to the ``$EXPTDIR`` more easily. If the login shell is bash, it can be set as follows: - -.. code-block:: console - - export EXPTDIR=// - -If the login shell is csh/tcsh, it can be set using: - -.. code-block:: console - - setenv EXPTDIR // - - -.. _Automate: - -Automated Option -^^^^^^^^^^^^^^^^^^^ - -The simplest way to run the Rocoto workflow is to automate the process using a job scheduler such as :term:`Cron`. For automatic resubmission of the workflow at regular intervals (e.g., every minute), the user can add the following commands to their ``config.sh`` file *before* generating the experiment: - -.. code-block:: console - - USE_CRON_TO_RELAUNCH="TRUE" - CRON_RELAUNCH_INTVL_MNTS="02" - -This will automatically add an appropriate entry to the user's :term:`cron table` and launch the workflow. Alternatively, the user can add a crontab entry using the ``crontab -e`` command. 
As mentioned in :numref:`Section %s `, the last line of output from ``./generate_FV3LAM_wflow.sh`` (starting with ``*/1 * * * *`` or ``*/3 * * * *``), can be pasted into the crontab file. It can also be found in the ``$EXPTDIR/log.generate_FV3LAM_wflow`` file. The crontab entry should resemble the following: - -.. code-block:: console - - */3 * * * * cd && ./launch_FV3LAM_wflow.sh called_from_cron="TRUE" - -where ```` is changed to correspond to the user's ``$EXPTDIR``. The number ``3`` can be changed to a different positive integer and simply means that the workflow will be resubmitted every three minutes. - -.. hint:: - - * On NOAA Cloud instances, ``*/1 * * * *`` is the preferred option for cron jobs because compute nodes will shut down if they remain idle too long. If the compute node shuts down, it can take 15-20 minutes to start up a new one. - * On other NOAA HPC systems, admins discourage the ``*/1 * * * *`` due to load problems. ``*/3 * * * *`` is the preferred option for cron jobs on non-NOAA Cloud systems. - -To check the experiment progress: - -.. code-block:: console - - cd $EXPTDIR - rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 - - - -After finishing the experiment, open the crontab using ``crontab -e`` and delete the crontab entry. - -.. note:: - - On Orion, *cron* is only available on the orion-login-1 node, so users will need to work on that node when running *cron* jobs on Orion. - -.. _Success: - -The workflow run is complete when all tasks have "SUCCEEDED". If everything goes smoothly, users will eventually see a workflow status table similar to the following: - -.. code-block:: console - - CYCLE TASK JOBID STATE EXIT STATUS TRIES DURATION - ========================================================================================================== - 201906150000 make_grid 4953154 SUCCEEDED 0 1 5.0 - 201906150000 make_orog 4953176 SUCCEEDED 0 1 26.0 - 201906150000 make_sfc_climo 4953179 SUCCEEDED 0 1 33.0 - 201906150000 get_extrn_ics 4953155 SUCCEEDED 0 1 2.0 - 201906150000 get_extrn_lbcs 4953156 SUCCEEDED 0 1 2.0 - 201906150000 make_ics 4953184 SUCCEEDED 0 1 16.0 - 201906150000 make_lbcs 4953185 SUCCEEDED 0 1 71.0 - 201906150000 run_fcst 4953196 SUCCEEDED 0 1 1035.0 - 201906150000 run_post_f000 4953244 SUCCEEDED 0 1 5.0 - 201906150000 run_post_f001 4953245 SUCCEEDED 0 1 4.0 - ... - 201906150000 run_post_f012 4953381 SUCCEEDED 0 1 7.0 - -If users choose to run METplus verification tasks as part of their experiment, the output above will include additional lines after ``run_post_f012``. The output will resemble the following but may be significantly longer when using ensemble verification: - -.. code-block:: console - - CYCLE TASK JOBID STATE EXIT STATUS TRIES DURATION - ========================================================================================================== - 201906150000 make_grid 30466134 SUCCEEDED 0 1 5.0 - ... 
- 201906150000 run_post_f012 30468271 SUCCEEDED 0 1 7.0 - 201906150000 run_gridstatvx 30468420 SUCCEEDED 0 1 53.0 - 201906150000 run_gridstatvx_refc 30468421 SUCCEEDED 0 1 934.0 - 201906150000 run_gridstatvx_retop 30468422 SUCCEEDED 0 1 1002.0 - 201906150000 run_gridstatvx_03h 30468491 SUCCEEDED 0 1 43.0 - 201906150000 run_gridstatvx_06h 30468492 SUCCEEDED 0 1 29.0 - 201906150000 run_gridstatvx_24h 30468493 SUCCEEDED 0 1 20.0 - 201906150000 run_pointstatvx 30468423 SUCCEEDED 0 1 670.0 - - -Launch the Rocoto Workflow Using a Script -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Users who prefer not to automate their experiments can run the Rocoto workflow using the ``launch_FV3LAM_wflow.sh`` script provided. Simply call it without any arguments from the experiment directory: - -.. code-block:: console - - cd $EXPTDIR - ./launch_FV3LAM_wflow.sh - -This script creates a log file named ``log.launch_FV3LAM_wflow`` in ``$EXPTDIR`` or appends information to the file if it already exists. The launch script also creates the ``log/FV3LAM_wflow.log`` file, which shows Rocoto task information. Check the end of the log file periodically to see how the experiment is progressing: - -.. code-block:: console - - tail -n 40 log.launch_FV3LAM_wflow - -In order to launch additional tasks in the workflow, call the launch script again; this action will need to be repeated until all tasks in the workflow have been launched. To (re)launch the workflow and check its progress on a single line, run: - -.. code-block:: console - - ./launch_FV3LAM_wflow.sh; tail -n 40 log.launch_FV3LAM_wflow - -This will output the last 40 lines of the log file, which list the status of the workflow tasks (e.g., SUCCEEDED, DEAD, RUNNING, SUBMITTING, QUEUED). The number 40 can be changed according to the user's preferences. The output will look like this: - -.. code-block:: console - - CYCLE TASK JOBID STATE EXIT STATUS TRIES DURATION - ====================================================================================================== - 202006170000 make_grid druby://hfe01:33728 SUBMITTING - 0 0.0 - 202006170000 make_orog - - - - - - 202006170000 make_sfc_climo - - - - - - 202006170000 get_extrn_ics druby://hfe01:33728 SUBMITTING - 0 0.0 - 202006170000 get_extrn_lbcs druby://hfe01:33728 SUBMITTING - 0 0.0 - 202006170000 make_ics - - - - - - 202006170000 make_lbcs - - - - - - 202006170000 run_fcst - - - - - - 202006170000 run_post_00 - - - - - - 202006170000 run_post_01 - - - - - - 202006170000 run_post_02 - - - - - - 202006170000 run_post_03 - - - - - - 202006170000 run_post_04 - - - - - - 202006170000 run_post_05 - - - - - - 202006170000 run_post_06 - - - - - - - Summary of workflow status: - ~~~~~~~~~~~~~~~~~~~~~~~~~~ - - 0 out of 1 cycles completed. - Workflow status: IN PROGRESS - -If all the tasks complete successfully, the "Workflow status" at the bottom of the log file will change from "IN PROGRESS" to "SUCCESS". If certain tasks could not complete, the "Workflow status" will instead change to "FAILURE". Error messages for each specific task can be found in the task log files located in ``$EXPTDIR/log``. - -The workflow run is complete when all tasks have "SUCCEEDED", and the ``rocotostat`` command outputs a table similar to the one :ref:`above `. - - -.. _RocotoManualRun: - -Launch the Rocoto Workflow Manually -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -**Load Rocoto** - -Instead of running the ``./launch_FV3LAM_wflow.sh`` script, users can load Rocoto and any other required modules. 
This gives the user more control over the process and allows them to view experiment progress more easily. On Level 1 systems, the Rocoto modules are loaded automatically in :numref:`Step %s `. For most other systems, a variant on the following commands will be necessary to load the Rocoto module: - -.. code-block:: console - - module use - module load rocoto - -Some systems may require a version number (e.g., ``module load rocoto/1.3.3``) - -**Run the Rocoto Workflow** - -After loading Rocoto, call ``rocotorun`` from the experiment directory to launch the workflow tasks. This will start any tasks that do not have a dependency. As the workflow progresses through its stages, ``rocotostat`` will show the state of each task and allow users to monitor progress: - -.. code-block:: console - - cd $EXPTDIR - rocotorun -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 - rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 - -The ``rocotorun`` and ``rocotostat`` commands above will need to be resubmitted regularly and repeatedly until the experiment is finished. In part, this is to avoid having the system time out. This also ensures that when one task ends, tasks dependent on it will run as soon as possible, and ``rocotostat`` will capture the new progress. - -If the experiment fails, the ``rocotostat`` command will indicate which task failed. Users can look at the log file in the ``log`` subdirectory for the failed task to determine what caused the failure. For example, if the ``make_grid`` task failed, users can open the ``make_grid.log`` file to see what caused the problem: - -.. code-block:: console - - cd $EXPTDIR/log - vi make_grid.log - -.. note:: - - If users have the `Slurm workload manager `__ on their system, they can run the ``squeue`` command in lieu of ``rocotostat`` to check what jobs are currently running. - - -.. _RunUsingStandaloneScripts: - -Run the Workflow Using Stand-Alone Scripts ---------------------------------------------- - -.. note:: - The Rocoto workflow manager cannot be used inside a container. - -The regional workflow can be run using standalone shell scripts in cases where the Rocoto software is not available on a given platform. If Rocoto *is* available, see :numref:`Section %s ` to run the workflow using Rocoto. - -#. ``cd`` into the experiment directory - -#. Set the environment variable ``$EXPTDIR`` for either bash or csh, respectively: - - .. code-block:: console - - export EXPTDIR=`pwd` - setenv EXPTDIR `pwd` - -#. Copy the wrapper scripts from the ``regional_workflow`` directory into the experiment directory. Each workflow task has a wrapper script that sets environment variables and runs the job script. - - .. code-block:: console - - cp /ufs-srweather-app/regional_workflow/ush/wrappers/* . - -#. Set the ``OMP_NUM_THREADS`` variable. - - .. code-block:: console - - export OMP_NUM_THREADS=1 - -#. Run each of the listed scripts in order. Scripts with the same stage number (listed in :numref:`Table %s `) may be run simultaneously. - - .. code-block:: console - - ./run_make_grid.sh - ./run_get_ics.sh - ./run_get_lbcs.sh - ./run_make_orog.sh - ./run_make_sfc_climo.sh - ./run_make_ics.sh - ./run_make_lbcs.sh - ./run_fcst.sh - ./run_post.sh - -Check the batch script output file in your experiment directory for a “SUCCESS” message near the end of the file. - -.. _RegionalWflowTasks: - -.. table:: List of tasks in the regional workflow in the order that they are executed. - Scripts with the same stage number may be run simultaneously. 
The number of - processors and wall clock time is a good starting point for Cheyenne or Hera - when running a 48-h forecast on the 25-km CONUS domain. For a brief description of tasks, see :numref:`Table %s `. - - +------------+------------------------+----------------+----------------------------+ - | **Stage/** | **Task Run Script** | **Number of** | **Wall clock time (H:mm)** | - | **step** | | **Processors** | | - +============+========================+================+============================+ - | 1 | run_get_ics.sh | 1 | 0:20 (depends on HPSS vs | - | | | | FTP vs staged-on-disk) | - +------------+------------------------+----------------+----------------------------+ - | 1 | run_get_lbcs.sh | 1 | 0:20 (depends on HPSS vs | - | | | | FTP vs staged-on-disk) | - +------------+------------------------+----------------+----------------------------+ - | 1 | run_make_grid.sh | 24 | 0:20 | - +------------+------------------------+----------------+----------------------------+ - | 2 | run_make_orog.sh | 24 | 0:20 | - +------------+------------------------+----------------+----------------------------+ - | 3 | run_make_sfc_climo.sh | 48 | 0:20 | - +------------+------------------------+----------------+----------------------------+ - | 4 | run_make_ics.sh | 48 | 0:30 | - +------------+------------------------+----------------+----------------------------+ - | 4 | run_make_lbcs.sh | 48 | 0:30 | - +------------+------------------------+----------------+----------------------------+ - | 5 | run_fcst.sh | 48 | 0:30 | - +------------+------------------------+----------------+----------------------------+ - | 6 | run_post.sh | 48 | 0:25 (2 min per output | - | | | | forecast hour) | - +------------+------------------------+----------------+----------------------------+ - -Users can access log files for specific tasks in the ``$EXPTDIR/log`` directory. To see how the experiment is progressing, users can also check the end of the ``log.launch_FV3LAM_wflow`` file from the command line: - -.. code-block:: console - - tail -n 40 log.launch_FV3LAM_wflow - -.. hint:: - If any of the scripts return an error that "Primary job terminated normally, but one process returned a non-zero exit code," there may not be enough space on one node to run the process. On an HPC system, the user will need to allocate a(nother) compute node. The process for doing so is system-dependent, and users should check the documentation available for their HPC system. Instructions for allocating a compute node on NOAA Cloud systems can be viewed in :numref:`Section %s ` as an example. - -.. note:: - On most HPC systems, users will need to submit a batch job to run multi-processor jobs. On some HPC systems, users may be able to run the first two jobs (serial) on a login node/command-line. Example scripts for Slurm (Hera) and PBS (Cheyenne) resource managers are provided (``sq_job.sh`` and ``qsub_job.sh``, respectively). These examples will need to be adapted to each user's system. Alternatively, some batch systems allow users to specify most of the settings on the command line (with the ``sbatch`` or ``qsub`` command, for example). - - - -.. _PlotOutput: - -Plot the Output -=============== -Two python scripts are provided to generate plots from the :term:`FV3`-LAM post-processed :term:`GRIB2` output. Information on how to generate the graphics can be found in :numref:`Chapter %s `. 
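-
-As a supplement to the note above about batch submission of the stand-alone wrapper scripts, a minimal command-line submission on a Slurm system might look like the following sketch; the account, task count, and walltime are placeholders (loosely based on the 25-km values in the table above) and must be adapted to the user's system:
-
-.. code-block:: console
-
-   sbatch --account=an_account --ntasks=24 --time=00:20:00 ./run_make_grid.sh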
diff --git a/docs/UsersGuide/source/BuildSRW.rst b/docs/UsersGuide/source/BuildSRW.rst new file mode 100644 index 0000000000..e768372233 --- /dev/null +++ b/docs/UsersGuide/source/BuildSRW.rst @@ -0,0 +1,480 @@ +.. _BuildSRW: + +========================== +Building the SRW App +========================== + +The Unified Forecast System (:term:`UFS`) Short-Range Weather (SRW) Application is an :term:`umbrella repository` consisting of a number of different :ref:`components ` housed in external repositories. Once the SRW App is built, users can configure experiments and generate predictions of atmospheric behavior over a limited spatial area and on time scales ranging from minutes out to several days. + +.. attention:: + + The SRW Application has `four levels of support `__. The steps described in this chapter will work most smoothly on preconfigured (Level 1) systems. This chapter can also serve as a starting point for running the SRW App on other systems (including generic Linux/Mac systems), but the user may need to perform additional troubleshooting. + +.. note:: + The :ref:`container approach ` is recommended for a smoother first-time build and run experience. Building without a container may allow for more customization. However, the non-container approach requires more in-depth system-based knowledge, especially on Level 3 and 4 systems, so it is less appropriate for beginners. + +To build the SRW App, users will complete the following steps: + + #. :ref:`Install prerequisites ` + #. :ref:`Clone the SRW App from GitHub ` + #. :ref:`Check out the external repositories ` + #. :ref:`Set up the build environment and build the executables ` + +.. _AppBuildProc: + +.. figure:: _static/SRW_build_process.png + :alt: Flowchart describing the SRW App build process. + + *Overview of the SRW App Build Process* + + +.. _HPCstackInfo: + +Install the Prerequisite Software Stack +========================================== + +Currently, installation of the prerequisite software stack is supported via HPC-Stack. :term:`HPC-Stack` is a repository that provides a unified, shell script-based system to build the software stack required for `UFS `__ applications such as the SRW App. + +.. Attention:: + Skip the HPC-Stack installation if working on a `Level 1 system `__ (e.g., Cheyenne, Hera, Orion, NOAA Cloud), and :ref:`continue to the next section `. + +Background +---------------- + +The UFS Weather Model draws on over 50 code libraries to run its applications. These libraries range from libraries developed in-house at NOAA (e.g., NCEPLIBS, FMS) to libraries developed by NOAA's partners (e.g., PIO, ESMF) to truly third party libraries (e.g., netCDF). Individual installation of these libraries is not practical, so the `HPC-Stack `__ was developed as a central installation system to ensure that the infrastructure environment across multiple platforms is as similar as possible. Installation of the HPC-Stack is required to run the SRW App. + +Instructions +------------------------- +Users working on systems that fall under `Support Levels 2-4 `__ will need to install the HPC-Stack the first time they try to build applications (such as the SRW App) that depend on it. Users can either build the HPC-Stack on their local system or use the centrally maintained stacks on each HPC platform if they are working on a Level 1 system. Before installing the HPC-Stack, users on both Linux and MacOS systems should set the stack size to "unlimited" (if allowed) or to the largest possible value: + +.. 
code-block:: console + + # Linux, if allowed + ulimit -s unlimited + + # MacOS, this corresponds to 65MB + ulimit -S -s unlimited + +For a detailed description of installation options, see :ref:`Installing the HPC-Stack `. + +.. attention:: + Although HPC-Stack is the fully-supported option as of the v2.1.0 release, UFS applications are gradually shifting to :term:`spack-stack`, which is a :term:`Spack`-based method for installing UFS prerequisite software libraries. The spack-stack is currently used on NOAA Cloud platforms and in containers, while HPC-Stack is still used on other Level 1 systems and is the software stack validated by the UFS Weather Model as of the v2.1.0 release. Users are encouraged to check out `spack-stack `__ to prepare for the upcoming shift in support from HPC-Stack to spack-stack. + +After completing installation, continue to the next section (:numref:`Section %s: Download the UFS SRW Application Code `). + +.. _DownloadSRWApp: + +Download the UFS SRW Application Code +====================================== +The SRW Application source code is publicly available on GitHub. To download the SRW App code, clone the ``develop`` branch of the repository: + +.. code-block:: console + + git clone -b develop https://github.com/ufs-community/ufs-srweather-app.git + +The cloned repository contains the configuration files and sub-directories shown in +:numref:`Table %s `. The user may set an ``$SRW`` environment variable to point to the location of the new ``ufs-srweather-app`` repository. For example, if ``ufs-srweather-app`` was cloned into the ``$HOME`` directory, the following commands will set an ``$SRW`` environment variable in a bash or csh shell, respectively: + +.. code-block:: console + + export SRW=$HOME/ufs-srweather-app + setenv SRW $HOME/ufs-srweather-app + +.. _FilesAndSubDirs: + +.. table:: Files and sub-directories of the ufs-srweather-app repository + + +--------------------------------+-----------------------------------------------------------+ + | **File/Directory Name** | **Description** | + +================================+===========================================================+ + | CMakeLists.txt | Main CMake file for SRW App | + +--------------------------------+-----------------------------------------------------------+ + | devbuild.sh | SRW App build script | + +--------------------------------+-----------------------------------------------------------+ + | docs | Contains release notes, documentation, and User's Guide | + +--------------------------------+-----------------------------------------------------------+ + | environment.yml | Contains information on the package versions required for | + | | the regional workflow environment. | + +--------------------------------+-----------------------------------------------------------+ + | etc | Contains Lmod startup scripts | + +--------------------------------+-----------------------------------------------------------+ + | Externals.cfg | Includes tags pointing to the correct version of the | + | | external GitHub repositories/branches used in the SRW | + | | App. | + +--------------------------------+-----------------------------------------------------------+ + | jobs | Contains the *j-job* script for each workflow task. These | + | | scripts set up the environment variables and call an | + | | *ex-script* script located in the ``scripts`` | + | | subdirectory. 
| + +--------------------------------+-----------------------------------------------------------+ + | LICENSE.md | CC0 license information | + +--------------------------------+-----------------------------------------------------------+ + | manage_externals | Utility for checking out external repositories | + +--------------------------------+-----------------------------------------------------------+ + | modulefiles | Contains build and workflow modulefiles | + +--------------------------------+-----------------------------------------------------------+ + | parm | Contains parameter files. Includes UFS Weather Model | + | | configuration files such as ``model_configure``, | + | | ``diag_table``, and ``field_table``. | + +--------------------------------+-----------------------------------------------------------+ + | README.md | Contains SRW App introductory information | + +--------------------------------+-----------------------------------------------------------+ + | rename_model.sh | Used to rename the model before it is transitioned into | + | | operations. The SRW App is a generic app that is the base | + | | for models such as :term:`AQM` and :term:`RRFS`. When | + | | these models become operational, variables like | + | | ``HOMEdir`` and ``PARMdir`` will be renamed to | + | | ``HOMEaqm``/``HOMErrfs``, ``PARMaqm``/``PARMrrfs``, etc. | + | | using this script. | + +--------------------------------+-----------------------------------------------------------+ + | scripts | Contains the *ex-script* for each workflow task. | + | | These scripts are where the script logic and executables | + | | are contained. | + +--------------------------------+-----------------------------------------------------------+ + | sorc | Contains CMakeLists.txt; external repositories | + | | will be cloned into this directory. | + +--------------------------------+-----------------------------------------------------------+ + | tests | Contains SRW App tests, including workflow end-to-end | + | | (WE2E) tests. | + +--------------------------------+-----------------------------------------------------------+ + | ufs_srweather_app_meta.h.in | Meta information for SRW App which can be used by | + | | other packages | + +--------------------------------+-----------------------------------------------------------+ + | ufs_srweather_app.settings.in | SRW App configuration summary | + +--------------------------------+-----------------------------------------------------------+ + | ush | Contains utility scripts. Includes the experiment | + | | configuration file and the experiment generation file. | + +--------------------------------+-----------------------------------------------------------+ + | versions | Contains ``run.ver`` and ``build.ver`` files, which track | + | | package versions at run time and compile time, | + | | respectively. | + +--------------------------------+-----------------------------------------------------------+ + +.. COMMENT: Is environment.yml deprecated? Remove? + +.. _CheckoutExternals: + +Check Out External Components +================================ + +The SRW App relies on a variety of components (e.g., UFS_UTILS, ufs-weather-model, and UPP) detailed in :numref:`Chapter %s ` of this User's Guide. Each component has its own repository. Users must run the ``checkout_externals`` script to collect the individual components of the SRW App from their respective GitHub repositories. 
The ``checkout_externals`` script uses the configuration file ``Externals.cfg`` in the top level directory of the SRW App to clone the correct tags (code versions) of the external repositories listed in :numref:`Section %s ` into the appropriate directories (e.g., ``ush``, ``sorc``). + +Run the executable that pulls in SRW App components from external repositories: + +.. code-block:: console + + cd + ./manage_externals/checkout_externals + +The script should output dialogue indicating that it is retrieving different code repositories. It may take several minutes to download these repositories. + +To see more options for the ``checkout_externals`` script, users can run ``./manage_externals/checkout_externals -h``. For example: + + * ``-S``: Outputs the status of the repositories managed by ``checkout_externals``. By default only summary information is provided. Use with the ``-v`` (verbose) option to see details. + * ``-x [EXCLUDE [EXCLUDE ...]]``: allows users to exclude components when checking out externals. + * ``-o``: By default only the required externals are checked out. This flag will also check out the optional externals. + +Generally, users will not need to use the options and can simply run the script, but the options are available for those who are curious. + +.. _BuildExecutables: + +Set Up the Environment and Build the Executables +=================================================== + +.. _DevBuild: + +``devbuild.sh`` Approach +----------------------------- + +On Level 1 systems for which a modulefile is provided under the ``modulefiles`` directory, users can build the SRW App binaries with the following command: + +.. code-block:: console + + ./devbuild.sh --platform= + +where ```` is replaced with the name of the platform the user is working on. Valid values include: ``cheyenne`` | ``gaea`` | ``hera`` | ``jet`` | ``linux`` | ``macos`` | ``noaacloud`` | ``orion`` + +.. note:: + Although build modulefiles exist for generic Linux and MacOS machines, users will need to alter these according to the instructions in Sections :numref:`%s ` & :numref:`%s `. Users on these systems may have more success building the SRW App with the :ref:`CMake Approach ` instead. + +If compiler auto-detection fails for some reason, specify it using the ``--compiler`` argument. For example: + +.. code-block:: console + + ./devbuild.sh --platform=hera --compiler=intel + +where valid values are ``intel`` or ``gnu``. + +The last line of the console output should be ``[100%] Built target ufs-weather-model``, indicating that the UFS Weather Model executable has been built successfully. + +If users want to build the optional ``GSI`` and ``rrfs_utl`` components for :term:`RRFS`, they can pass the ``gsi`` and ``rrfs_utils`` arguments to ``devbuild.sh``. For example: + +.. code-block:: console + + ./devbuild.sh -p=hera gsi rrfs_utils + +.. note:: + RRFS capabilities are currently build-only features. They are not yet available for use at runtime. + +The last few lines of the RRFS console output should be: + +.. code-block:: console + + [100%] Built target RRFS_UTILS + Install the project... + -- Install configuration: "RELEASE" + -- Installing: /path/to/ufs-srweather-app/exec/ufs_srweather_app.settings + +After running ``devbuild.sh``, the executables listed in :numref:`Table %s ` should appear in the ``ufs-srweather-app/exec`` directory. If users choose to build the ``GSI`` and ``rrfs_utils`` components, the executables listed in :numref:`Table %s ` will also appear there. 
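Once ``devbuild.sh`` finishes, a quick sanity check (a sketch, not part of the official instructions) is to list the ``exec`` directory and confirm that the expected executables were installed; the exact set of files depends on which optional components were built.

.. code-block:: console

   # From the ufs-srweather-app directory: list the installed executables.
   ls exec/
   # A successful default build includes ufs_model, chgres_cube, upp.x, and the
   # other utilities described in the table below; gsi.x, enkf.x, and the other
   # RRFS utilities appear only if the optional gsi/rrfs_utils targets were built.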
If the ``devbuild.sh`` build method does not work, or if users are not on a supported machine, they will have to manually set up the environment and build the SRW App binaries with CMake as described in :numref:`Section %s `. + +.. _ExecDescription: + +.. table:: Names and descriptions of the executables produced by the build step and used by the SRW App + + +------------------------+---------------------------------------------------------------------------------+ + | **Executable Name** | **Description** | + +========================+=================================================================================+ + | chgres_cube | Reads in raw external model (global or regional) and surface climatology data | + | | to create initial and lateral boundary conditions | + +------------------------+---------------------------------------------------------------------------------+ + | emcsfc_ice_blend | Blends National Ice Center sea ice cover and EMC sea ice concentration data to | + | | create a global sea ice analysis used to update the GFS once per day | + +------------------------+---------------------------------------------------------------------------------+ + | emcsfc_snow2mdl | Blends National Ice Center snow cover and Air Force snow depth data to create a | + | | global depth analysis used to update the GFS snow field once per day | + +------------------------+---------------------------------------------------------------------------------+ + | filter_topo | Filters topography based on resolution | + +------------------------+---------------------------------------------------------------------------------+ + | fregrid | Remaps data from the input mosaic grid to the output mosaic grid | + +------------------------+---------------------------------------------------------------------------------+ + | fvcom_to_FV3 | Determines lake surface conditions for the Great Lakes | + +------------------------+---------------------------------------------------------------------------------+ + | global_cycle | Updates the GFS surface conditions using external snow and sea ice analyses | + +------------------------+---------------------------------------------------------------------------------+ + | global_equiv_resol | Calculates a global, uniform, cubed-sphere equivalent resolution for the | + | | regional Extended Schmidt Gnomonic (ESG) grid | + +------------------------+---------------------------------------------------------------------------------+ + | inland | Creates an inland land mask by determining inland (i.e., non-coastal) points | + | | and assigning a value of 1. Default value is 0. | + +------------------------+---------------------------------------------------------------------------------+ + | lakefrac | Calculates the ratio of the lake area to the grid cell area at each atmospheric | + | | grid point. 
| + +------------------------+---------------------------------------------------------------------------------+ + | make_hgrid | Computes geo-referencing parameters (e.g., latitude, longitude, grid cell area) | + | | for global uniform grids | + +------------------------+---------------------------------------------------------------------------------+ + | make_solo_mosaic | Creates mosaic files with halos | + +------------------------+---------------------------------------------------------------------------------+ + | orog | Generates orography, land mask, and gravity wave drag files from fixed files | + +------------------------+---------------------------------------------------------------------------------+ + | orog_gsl | Creates orographic statistics fields required for the orographic drag suite | + | | developed by NOAA's Global Systems Laboratory (GSL) | + +------------------------+---------------------------------------------------------------------------------+ + | regional_esg_grid | Generates an ESG regional grid based on a user-defined namelist | + +------------------------+---------------------------------------------------------------------------------+ + | sfc_climo_gen | Creates surface climatology fields from fixed files for use in ``chgres_cube`` | + +------------------------+---------------------------------------------------------------------------------+ + | shave | Shaves the excess halo rows down to what is required for the lateral boundary | + | | conditions (LBCs) in the orography and grid files | + +------------------------+---------------------------------------------------------------------------------+ + | upp.x | Post processor for the model output | + +------------------------+---------------------------------------------------------------------------------+ + | ufs_model | UFS Weather Model executable | + +------------------------+---------------------------------------------------------------------------------+ + | vcoord_gen | Generates hybrid coordinate interface profiles | + +------------------------+---------------------------------------------------------------------------------+ + +.. _RRFSexec: + +.. table:: Names and descriptions of the executables produced when the RRFS option is enabled + + +----------------------------+-----------------------------------------------------------------------------+ + | **Executable Name** | **Description** | + +============================+=============================================================================+ + | gsi.x | Runs the Gridpoint Statistical Interpolation (GSI). | + +----------------------------+-----------------------------------------------------------------------------+ + | enkf.x | Runs the Ensemble Kalman Filter. | + +----------------------------+-----------------------------------------------------------------------------+ + | adjust_soiltq.exe | Uses the lowest-level temperature and moisture analysis increments to | + | | adjust the soil moisture and soil temperature after analysis. | + +----------------------------+-----------------------------------------------------------------------------+ + | check_imssnow_fv3lam.exe | This is a tool used to read snow and ice fields from surface files and | + | | check those fields. 
| + +----------------------------+-----------------------------------------------------------------------------+ + | fv3lam_nonvarcldana.exe | Runs the non-variational cloud and precipitable hydrometeor analysis based | + | | on the METAR cloud observations, satellite retrieved cloud top products, | + | | and radar reflectivity. | + +----------------------------+-----------------------------------------------------------------------------+ + | gen_annual_maxmin_GVF.exe | Generates maximum and minimum greenness vegetation fraction (GVF) files | + | | based on year-long GVF observations for the ``update_GVF`` process. | + +----------------------------+-----------------------------------------------------------------------------+ + | gen_cs.exe | NCL scripts to do cross section plotting. | + +----------------------------+-----------------------------------------------------------------------------+ + | gen_ensmean_recenter.exe | Runs the ensemble mean/recentering calculation for FV3LAM ensemble files. | + +----------------------------+-----------------------------------------------------------------------------+ + | lakesurgery.exe | Replaces the existing lake depth with the GLOBathy bathymetry. It is | + | | designed to work with the HRRR model. | + +----------------------------+-----------------------------------------------------------------------------+ + | nc_diag_cat.x | Performs :term:`NetCDF` Diagnostic Concatenation. Reads metadata while | + | | allocating necessary space, defines variables with the metadata (no | + | | attributes are stored), then finally add data to the output file. | + | | This is the MPI executable. | + +----------------------------+-----------------------------------------------------------------------------+ + | process_imssnow_fv3lam.exe | Uses FV3LAM snow and ice fields based on the snow and ice information from | + | | imssnow. | + +----------------------------+-----------------------------------------------------------------------------+ + | process_larccld.exe | Processes NASA Langley cloud top product, which reads the cloud top | + | | pressure, temperature, etc. and maps them to the ESG grid. | + +----------------------------+-----------------------------------------------------------------------------+ + | process_Lightning.exe | Processes lightning data. Reads NLDN NetCDF observation files and map the | + | | lightning observations into FV3LAM ESG grid. | + +----------------------------+-----------------------------------------------------------------------------+ + | process_metarcld.exe | Processes METAR ceilometer cloud observations. Reads the cloud base and | + | | coverage observations from PrepBUFR and distributes the cloud, weather, | + | | and visibility observations to the ESG grid. | + +----------------------------+-----------------------------------------------------------------------------+ + | process_NSSL_mosaic.exe | Processes :term:`NSSL` MRMS radar reflectivity mosaic observations. Reads | + | | 33-level NSSL MRMS radar reflectivity grib2 files and then interpolates the | + | | reflectivity horizontally to the ESG grid. | + +----------------------------+-----------------------------------------------------------------------------+ + | process_updatesst.exe | Updates Sea Surface Temperature (SST) field based on the SST analysis from | + | | NCEP. 
| + +----------------------------+-----------------------------------------------------------------------------+ + | ref2tten.exe | Calculates temperature tendency based on the radar reflectivity observation | + | | at each grid point. This temperature tendency can be used by the model | + | | during integration as latent heating initialization for ongoing | + | | precipitation systems, especially convection. | + +----------------------------+-----------------------------------------------------------------------------+ + | test_nc_unlimdims.x | Checks to see the number of fields with unlimited dimensions in NetCDF | + | | files. | + +----------------------------+-----------------------------------------------------------------------------+ + | ufs_srweather_app.settings | | + +----------------------------+-----------------------------------------------------------------------------+ + | update_bc.exe | Adjusts 0-h boundary conditions based on the analysis results during data | + | | assimilation cycling. | + +----------------------------+-----------------------------------------------------------------------------+ + | update_GVF.exe | Updates the GVF in the surface file based on the real-time observation | + | | files. | + +----------------------------+-----------------------------------------------------------------------------+ + | update_ice.exe | Replaces ice fields in warm start surface files based on the forecast from | + | | cold start forecast using the GFS as the initial file. | + +----------------------------+-----------------------------------------------------------------------------+ + | use_raphrrr_sfc.exe | Uses RAP and HRRR surface fields to replace the surface fields in FV3LAM. | + | | This is only used for starting the RRFS surface cycling. | + +----------------------------+-----------------------------------------------------------------------------+ + +.. COMMENT: What does ufs_srweather_app.settings do? + - precipitable hydrometeor analysis? + - What does the update_ice.exe description mean? + + +.. _CMakeApproach: + +CMake Approach +----------------- + +Set Up the Build Environment +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. attention:: + * If users successfully built the executables in :numref:`Table %s `, they should skip to step :numref:`Chapter %s `. + * Users who want to build the SRW App on MacOS or generic Linux systems should skip to :numref:`Section %s ` and follow the approach there. + +If the ``devbuild.sh`` approach failed, users need to set up their environment to run a workflow on their specific platform. First, users should make sure ``Lmod`` is the app used for loading modulefiles. This is the case on most Level 1 systems; however, on systems such as Gaea/Odin, the default modulefile loader is from Cray and must be switched to Lmod. For example, on Gaea, users can run one of the following two commands depending on whether they have a bash or csh shell, respectively: + +.. code-block:: console + + source etc/lmod-setup.sh gaea + source etc/lmod-setup.csh gaea + +.. note:: + + If users execute one of the above commands on systems that don't need it, it will not cause any problems (it will simply do a ``module purge``). + +From here, ``Lmod`` is ready to load the modulefiles needed by the SRW App. These modulefiles are located in the ``modulefiles`` directory. To load the necessary modulefile for a specific ```` using a given ````, run: + +.. 
code-block:: console + + module use + module load build__ + +where ```` is the full path to the ``modulefiles`` directory. + +This will work on Level 1 systems, where a modulefile is available in the ``modulefiles`` directory. On Level 2-4 systems (including generic Linux/MacOS systems), users will need to modify certain environment variables, such as the path to HPC-Stack, so that the SRW App can find and load the appropriate modules. For systems with Lmod installed, one of the current ``build__`` modulefiles can be copied and used as a template. To check whether Lmod is installed, run ``echo $LMOD_PKG``, and see if it outputs a path to the Lmod package. On systems without Lmod, users can modify or set the required environment variables with the ``export`` or ``setenv`` commands, depending on whether they are using a bash or csh/tcsh shell, respectively: + +.. code-block:: + + export = + setenv + +Note that building the SRW App without Lmod is not supported at this time. It should be possible to do so, but it has not been tested. Users are encouraged to install Lmod on their system. + +.. _BuildCMake: + +Build the Executables Using CMake +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +After setting up the build environment in the preceding section (by loading the ``build__`` modulefile), users need to build the executables required to run the SRW App. In the ``ufs-srweather-app`` directory, create a subdirectory to hold the build's executables: + +.. code-block:: console + + mkdir build + cd build + +From the build directory, run the following commands to build the pre-processing utilities, forecast model, and post-processor: + +.. code-block:: console + + cmake .. -DCMAKE_INSTALL_PREFIX=.. -DCMAKE_INSTALL_BINDIR=exec .. + make -j 4 >& build.out & + +``-DCMAKE_INSTALL_PREFIX`` specifies the location where the ``exec``, ``include``, ``lib``, and ``share`` directories will be created. These directories will contain various components of the SRW App. Its recommended value ``..`` denotes one directory up from the build directory. In the next line, the ``make`` argument ``-j 4`` indicates that the build will run in parallel with 4 threads. Although users can specify a larger or smaller number of threads (e.g., ``-j 8``, ``-j 2``), it is highly recommended to use at least 4 parallel threads to prevent overly long installation times. + +The build will take a few minutes to complete. When it starts, a random number is printed to the console, and when it is done, a ``[1]+ Done`` message is printed to the console. ``[1]+ Exit`` indicates an error. Output from the build will be in the ``ufs-srweather-app/build/build.out`` file. When the build completes, users should see the forecast model executable ``ufs_model`` and several pre- and post-processing executables in the ``ufs-srweather-app/exec`` directory. These executables are described in :numref:`Table %s `. + +.. hint:: + + If you see the ``build.out`` file, but there is no ``ufs-srweather-app/exec`` directory, wait a few more minutes for the build to complete. + +.. _MacLinuxDetails: + +Additional Details for Building on MacOS or Generic Linux +------------------------------------------------------------ + +.. note:: + Users who are **not** building the SRW App on MacOS or generic Linux platforms may skip to :numref:`Section %s ` to finish building the SRW App or continue to :numref:`Chapter %s ` to configure and run an experiment. 
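Returning to the CMake build step described above: because ``make`` runs in the background with its output redirected, it can be helpful to watch the log while the build is in progress. This is only a suggested sketch, run from the ``build`` directory.

.. code-block:: console

   # Follow the build log as it is written (press Ctrl-C to stop following).
   tail -f build.out
   # ...or simply check the last few lines from time to time.
   tail -n 5 build.out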
+ +The SRW App can be built on MacOS and generic Linux machines after the prerequisite software has been installed on these systems (via :term:`HPC-Stack` or :term:`spack-stack`). The installation for MacOS is architecture-independent and has been tested using both x86_64 and M1 chips (running natively). The following configurations for MacOS have been tested: + + #. MacBookPro 2019, 2.4 GHz 8-core Intel Core i9 (x86_64), Monterey Sur 12.1, GNU compiler suite v.11.3.0 (gcc, gfortran, g++); mpich 3.3.2 or openmpi/4.1.2 + #. MacBookAir 2020, M1 chip (arm64, running natively), 4+4 cores, Big Sur 11.6.4, GNU compiler suite v.11.3.0 (gcc, gfortran, g++); mpich 3.3.2 or openmpi/4.1.2 + #. MacBook Pro 2015, 2.8 GHz Quad-Core Intel Core i7 (x86_64), Catalina OS X 10.15.7, GNU compiler suite v.11.2.0_3 (gcc, gfortran, g++); mpich 3.3.2 or openmpi/4.1.2 + +Several Linux builds have been tested on systems with x86_64 architectures. + +The ``./modulefiles/build__gnu.lua`` modulefile (where ```` is ``macos`` or ``linux``) is written as a Lmod module in the Lua language, and it can be loaded once the Lmod module environment has been initialized (which should have happened even prior to :ref:`installing HPC-Stack `). This module lists the location of the HPC-Stack modules, loads the meta-modules and modules, sets serial and parallel compilers, additional flags, and any environment variables needed for building the SRW App. The modulefile must be modified to include the absolute path to the user's HPC-Stack installation: + +.. code-block:: console + + - This path should point to your HPCstack installation directory + local HPCstack="/Users/username/hpc-stack/install" + +Linux users need to configure the ``ufs-srweather-app/etc/lmod-setup.sh`` file for the ``linux`` case and set the ``BASH_ENV`` variable to point to the Lmod initialization script. There is no need to modify this script for the ``macos`` case presuming that Lmod followed a standard installation procedure using the Homebrew package manager for MacOS. + +Next, users must source the Lmod setup file, just as they would on other systems, and load the modulefiles needed for building and running the SRW App: + +.. code-block:: console + + source etc/lmod-setup.sh + module use + module load build__gnu + export LDFLAGS+=" -L${MPI_ROOT}/lib " + +In a csh/tcsh shell, users would run ``source etc/lmod-setup.csh `` in place of the first line in the code block above. The last line is primarily needed for the MacOS platforms. + +Proceed to building the executables using the process outlined in :numref:`Step %s `. + +Run an Experiment +===================== + +To configure and run an experiment, users should proceed to :numref:`Chapter %s `. diff --git a/docs/UsersGuide/source/Components.rst b/docs/UsersGuide/source/Components.rst index bafb4ead29..85b72a5289 100644 --- a/docs/UsersGuide/source/Components.rst +++ b/docs/UsersGuide/source/Components.rst @@ -8,11 +8,11 @@ The SRW Application assembles a variety of components, including: * Pre-processor Utilities & Initial Conditions * UFS Weather Forecast Model -* Unified Post-Processor +* Unified Post Processor * Visualization Examples * Build System and Workflow -These components are documented within this User's Guide and supported through a `community forum `_. +These components are documented within this User's Guide and supported through the `GitHub Discussions `__ forum. .. 
_Utils: @@ -30,25 +30,23 @@ The SRW Application can be initialized from a range of operational initial condi Forecast Model ============== -The prognostic atmospheric model in the UFS SRW Application is the Finite-Volume Cubed-Sphere -(:term:`FV3`) dynamical core configured with a Limited Area Model (:term:`LAM`) capability :cite:`BlackEtAl2021`. The :term:`dynamical core` is the computational part of a model that solves the equations of fluid motion. A User’s Guide for the UFS :term:`Weather Model` is `here `__. +The prognostic atmospheric model in the UFS SRW Application is the Finite-Volume Cubed-Sphere (:term:`FV3`) dynamical core configured with a Limited Area Model (:term:`LAM`) capability :cite:`BlackEtAl2021`. The :term:`dynamical core` is the computational part of a model that solves the equations of fluid motion. A User's Guide for the UFS Weather Model can be accessed `here `__. Supported model resolutions in this release include 3-, 13-, and 25-km predefined contiguous U.S. (:term:`CONUS`) domains, each with 127 vertical levels. Preliminary tools for users to define their own domain are also available in the release with full, formal support of these tools to be provided in future releases. The Extended Schmidt Gnomonic (ESG) grid is used with the FV3-LAM, which features relatively uniform grid cells across the entirety of the domain. Additional information about the FV3 dynamical core can be found in the `scientific documentation `__, the `technical documentation `__, and on the `NOAA Geophysical Fluid Dynamics Laboratory website `__. -Interoperable atmospheric physics, along with various land surface model options, are supported through the Common Community Physics Package (CCPP), described `here `__. Atmospheric physics are a set of numerical methods describing small-scale processes such as clouds, turbulence, radiation, and their interactions. There will be four physics suites supported for the SRW App v2.0.0 release. The first is the FV3_RRFS_v1beta physics suite, which is being tested for use in the future operational implementation of the Rapid Refresh Forecast System (RRFS) planned for 2023-2024, and the second is an updated version of the physics suite used in the operational Global Forecast System (GFS) v16. Additionally, FV3_WoFS_v0 and FV3_HRRR will be supported. A scientific description of the CCPP parameterizations and suites can be found in the `CCPP Scientific Documentation `__, and CCPP technical aspects are described in the `CCPP Technical Documentation `__. The model namelist has many settings beyond the physics options that can optimize various aspects of the model for use with each of the supported suites. Additional information on Stochastic Physics options is available `here `__. +Interoperable atmospheric physics, along with various land surface model options, are supported through the Common Community Physics Package (CCPP), described `here `__. Atmospheric physics are a set of numerical methods describing small-scale processes such as clouds, turbulence, radiation, and their interactions. There are four physics suites supported as of the SRW App v2.1.0 release. The first is the FV3_RRFS_v1beta physics suite, which is being tested for use in the future operational implementation of the Rapid Refresh Forecast System (:term:`RRFS`) planned for 2023-2024, and the second is an updated version of the physics suite used in the operational Global Forecast System (GFS) v16. Additionally, FV3_WoFS_v0 and FV3_HRRR are supported. 
A detailed list of CCPP updates since the SRW App v2.0.0 release is available :ref:`here `. A full scientific description of CCPP parameterizations and suites can be found in the `CCPP Scientific Documentation `__, and CCPP technical aspects are described in the `CCPP Technical Documentation `__. The model namelist has many settings beyond the physics options that can optimize various aspects of the model for use with each of the supported suites. Additional information on Stochastic Physics options is available `here `__. .. note:: SPP is currently only available for specific physics schemes used in the RAP/HRRR physics suite. Users need to be aware of which physics suite definition file (:term:`SDF`) is chosen when turning this option on. Among the supported physics suites, the full set of parameterizations can only be used with the ``FV3_HRRR`` option for ``CCPP_PHYS_SUITE``. The SRW App supports the use of both :term:`GRIB2` and :term:`NEMSIO` input data. The UFS Weather Model ingests initial and lateral boundary condition files produced by :term:`chgres_cube` and outputs files in netCDF format on a specific projection (e.g., Lambert Conformal) in the horizontal direction and model levels in the vertical direction. -Post-processor +Post Processor ============== The SRW Application is distributed with the Unified Post Processor (:term:`UPP`) included in the workflow as a way to convert the netCDF output on the native model grid to :term:`GRIB2` format on standard isobaric vertical coordinates. The UPP can also be used to compute a variety of useful diagnostic fields, as described in the `UPP User's Guide `__. -Output from UPP can be used with visualization, plotting, and verification packages or in -further downstream post-processing (e.g., statistical post-processing techniques). +Output from UPP can be used with visualization, plotting, and verification packages or in further downstream post-processing (e.g., statistical post-processing techniques). .. _MetplusComponent: @@ -57,39 +55,32 @@ METplus Verification Suite The enhanced Model Evaluation Tools (`METplus `__) verification system has been integrated into the SRW App to facilitate forecast evaluation. METplus is a verification framework that spans a wide range of temporal scales (warn-on-forecast to climate) and spatial scales (storm to global). It is supported by the `Developmental Testbed Center (DTC) `__. -METplus *installation* is not included as part of the build process for the most recent release of the SRW App. However, METplus is preinstalled on many `Level 1 & 2 `__ systems; existing builds can be viewed `here `__. METplus can be installed on other systems individually or as part of :term:`HPC-Stack` installation. Users on systems without a previous installation of METplus can follow the `MET Installation Guide `__ and `METplus Installation Guide `__ for individual installation. Currently, METplus *installation* is not a supported feature for this release of the SRW App. However, METplus *use* is supported on systems with a functioning METplus installation. +METplus *installation* is not included as part of the build process for the most recent release of the SRW App. However, METplus is preinstalled on many `Level 1 & 2 `__ systems; existing builds can be viewed `here `__. 
-The core components of the METplus framework include the statistical driver, MET, the associated database and display systems known as METviewer and METexpress, and a suite of Python wrappers to provide low-level automation and examples, also called use-cases. MET is a set of verification tools developed for use by the :term:`NWP` community. It matches up grids with either gridded analyses or point observations and applies configurable methods to compute statistics and diagnostics. Extensive documentation is available in the `METplus User’s Guide `__ and `MET User’s Guide `__. Documentation for all other components of the framework can be found at the Documentation link for each component on the METplus `downloads `__ page. +METplus can be installed on other systems individually or as part of :term:`HPC-Stack` installation. Users on systems without a previous installation of METplus can follow the `MET Installation Guide `__ and `METplus Installation Guide `__ for individual installation. Currently, METplus *installation* is not a supported feature for this release of the SRW App. However, METplus *use* is supported on systems with a functioning METplus installation. -Among other techniques, MET provides the capability to compute standard verification scores for comparing deterministic gridded model data to point-based and gridded observations. It also provides ensemble and probabilistic verification methods for comparing gridded model data to point-based or gridded observations. Verification tasks to accomplish these comparisons are defined in the SRW App in :numref:`Table %s `. Currently, the SRW App supports the use of :term:`NDAS` observation files in `prepBUFR format `__ (which include conventional point-based surface and upper-air data) for point-based verification. It also supports gridded Climatology-Calibrated Precipitation Analysis (:term:`CCPA`) data for accumulated precipitation evaluation and Multi-Radar/Multi-Sensor (:term:`MRMS`) gridded analysis data for composite reflectivity and :term:`echo top` verification. +The core components of the METplus framework include the statistical driver, MET, the associated database and display systems known as METviewer and METexpress, and a suite of Python wrappers to provide low-level automation and examples, also called use cases. MET is a set of verification tools developed for use by the :term:`NWP` community. It matches up grids with either gridded analyses or point observations and applies configurable methods to compute statistics and diagnostics. Extensive documentation is available in the `METplus User's Guide `__ and `MET User's Guide `__. Documentation for all other components of the framework can be found at the Documentation link for each component on the METplus `downloads `__ page. -METplus is being actively developed by :term:`NCAR`/Research Applications Laboratory (RAL), NOAA/Earth Systems Research Laboratories (ESRL), and NOAA/Environmental Modeling Center (:term:`EMC`), and it is open to community contributions. - - -Visualization Example -===================== -A Python script is provided to create basic visualizations of the model output. The script -is designed to output graphics in PNG format for 14 standard meteorological variables -when using the pre-defined :term:`CONUS` domain. A difference plotting script is also included to visually compare two runs for the same domain and resolution. These scripts are provided only as an example for users familiar with Python. 
They may be used to perform a visual check to verify that the application is producing reasonable results. +Among other techniques, MET provides the capability to compute standard verification scores for comparing deterministic gridded model data to point-based and gridded observations. It also provides ensemble and probabilistic verification methods for comparing gridded model data to point-based or gridded observations. Verification tasks to accomplish these comparisons are defined in the SRW App in :numref:`Table %s `. Currently, the SRW App supports the use of :term:`NDAS` observation files (which include conventional point-based surface and upper-air data) in `prepBUFR format `__ for point-based verification. It also supports gridded Climatology-Calibrated Precipitation Analysis (:term:`CCPA`) data for accumulated precipitation evaluation and Multi-Radar/Multi-Sensor (:term:`MRMS`) gridded analysis data for composite reflectivity and :term:`echo top` verification. -After running ``manage_externals/checkout_externals``, the visualization scripts will be available in the ``ufs-srweather-app/regional_workflow/ush/Python`` directory. Usage information and instructions are described in :numref:`Chapter %s ` and are also included at the top of the script. +METplus is being actively developed by :term:`NCAR`/Research Applications Laboratory (RAL), NOAA/Earth Systems Research Laboratories (ESRL), and NOAA/Environmental Modeling Center (:term:`EMC`), and it is open to community contributions. Build System and Workflow ========================= The SRW Application has a portable build system and a user-friendly, modular, and expandable workflow framework. -An umbrella CMake-based build system is used for building the components necessary for running the end-to-end SRW Application, including the UFS Weather Model and the pre- and post-processing software. Additional libraries necessary for the application (e.g., :term:`NCEPLIBS-external` and :term:`NCEPLIBS`) are not included in the SRW Application build system but are available pre-built on pre-configured platforms. On other systems, they can be installed via the HPC-Stack (see :doc:`HPC-Stack Documentation `). There is a small set of system libraries and utilities that are assumed to be present on the target computer: the CMake build software, a Fortran, C, and C++ compiler, and an :term:`MPI` library. +An umbrella CMake-based build system is used for building the components necessary for running the end-to-end SRW Application, including the UFS Weather Model and the pre- and post-processing software. Additional libraries necessary for the application (e.g., :term:`NCEPLIBS-external` and :term:`NCEPLIBS`) are not included in the SRW Application build system but are available pre-built on pre-configured platforms. On other systems, they can be installed via the HPC-Stack (see :doc:`HPC-Stack Documentation `). There is a small set of system libraries and utilities that are assumed to be present on the target computer: the CMake build software; a Fortran, C, and C++ compiler; and an :term:`MPI` library. Once built, the provided experiment generator script can be used to create a Rocoto-based -workflow file that will run each task in the system in the proper sequence (see :numref:`Chapter %s ` or the `Rocoto documentation `_ for more information on Rocoto). If Rocoto and/or a batch system is not present on the available platform, the individual components can be run in a stand-alone, command line fashion with provided run scripts. 
The generated namelist for the atmospheric model can be modified in order to vary settings such as forecast starting and ending dates, forecast length hours, the :term:`CCPP` physics suite, integration time step, history file output frequency, and more. It also allows for configuration of other elements of the workflow; for example, users can choose whether to run some or all of the pre-processing, forecast model, and post-processing steps. +workflow file that will run each task in the system in the proper sequence (see :numref:`Chapter %s ` or the `Rocoto documentation `__ for more information on Rocoto). If Rocoto and/or a batch system is not present on the available platform, the individual components can be run in a stand-alone, command line fashion with provided run scripts. The generated namelist for the atmospheric model can be modified in order to vary settings such as forecast starting and ending dates, forecast length hours, the :term:`CCPP` physics suite, integration time step, history file output frequency, and more. It also allows for configuration of other elements of the workflow; for example, users can choose whether to run some or all of the pre-processing, forecast model, and post-processing steps. + +An optional Python plotting task is also included to create basic visualizations of the model output. The task outputs graphics in PNG format for several standard meteorological variables on the pre-defined :term:`CONUS` domain. A difference plotting option is also included to visually compare two runs for the same domain and resolution. These plots may be used to perform a visual check to verify that the application is producing reasonable results. Configuration instructions are provided in :numref:`Section %s `. -The latest SRW Application release has been tested on a variety of platforms widely used by -researchers, such as the NOAA Research and Development High-Performance Computing Systems -(RDHPCS), including Hera, Orion, and Jet; the National Center for Atmospheric Research (:term:`NCAR`) Cheyenne system; the National Severe Storms Laboratory (NSSL) HPC machine, Odin; the National Science Foundation Stampede2 system; and generic Linux and MacOS systems using Intel and GNU compilers. Four `levels of support `_ have been defined for the SRW Application, including pre-configured (Level 1), configurable (Level 2), limited test platforms (Level 3), and build only platforms (Level 4). Each level is further described below. +The latest SRW Application release has been tested on a variety of platforms widely used by researchers, such as the NOAA Research and Development High-Performance Computing Systems (RDHPCS), including Hera, Orion, and Jet; the National Center for Atmospheric Research (:term:`NCAR`) Cheyenne system; and generic Linux and MacOS systems using Intel and GNU compilers. Four `levels of support `__ have been defined for the SRW Application, including pre-configured (Level 1), configurable (Level 2), limited-test (Level 3), and build-only (Level 4) platforms. Each level is further described below. On pre-configured (Level 1) computational platforms, all the required libraries for building the SRW Application are available in a central place. That means bundled libraries (NCEPLIBS) and third-party libraries (NCEPLIBS-external) have both been built. The SRW Application is expected to build and run out-of-the-box on these pre-configured platforms. A few additional computational platforms are considered configurable for the SRW Application release. 
Configurable platforms (Level 2) are platforms where all of the required libraries for building the SRW Application are expected to install successfully but are not available in a central location. Applications and models are expected to build and run once the required bundled libraries (e.g., NCEPLIBS) and third-party libraries (e.g., NCEPLIBS-external) are built. -Limited-Test (Level 3) and Build-Only (Level 4) computational platforms are those in which the developers have built the code but little or no pre-release testing has been conducted, respectively. A complete description of the levels of support, along with a list of preconfigured and configurable platforms can be found in the `SRW Application Wiki `_. +Limited-Test (Level 3) and Build-Only (Level 4) computational platforms are those in which the developers have built the code but little or no pre-release testing has been conducted, respectively. A complete description of the levels of support, along with a list of preconfigured and configurable platforms can be found in the `SRW Application Wiki `__. diff --git a/docs/UsersGuide/source/ConfigWorkflow.rst b/docs/UsersGuide/source/ConfigWorkflow.rst index 3c3d4cffac..57d8151cbb 100644 --- a/docs/UsersGuide/source/ConfigWorkflow.rst +++ b/docs/UsersGuide/source/ConfigWorkflow.rst @@ -1,18 +1,24 @@ .. _ConfigWorkflow: -============================================================================================ -Workflow Parameters: Configuring the Workflow in ``config.sh`` and ``config_defaults.sh`` -============================================================================================ -To create the experiment directory and workflow when running the SRW Application, the user must create an experiment configuration file named ``config.sh``. This file contains experiment-specific information, such as dates, external model data, observation data, directories, and other relevant settings. To help the user, two sample configuration files have been included in the ``regional_workflow`` repository's ``ush`` directory: ``config.community.sh`` and ``config.nco.sh``. The first is for running experiments in community mode (``RUN_ENVIR`` set to "community"), and the second is for running experiments in "nco" mode (``RUN_ENVIR`` set to "nco"). Note that for this release, only "community" mode is supported. These files can be used as the starting point from which to generate a variety of experiment configurations for the SRW App. +================================================================================================ +Workflow Parameters: Configuring the Workflow in ``config.yaml`` and ``config_defaults.yaml`` +================================================================================================ +To create the experiment directory and workflow when running the SRW Application, the user must create an experiment configuration file (usually named ``config.yaml``). This file contains experiment-specific information, such as forecast dates, grid and physics suite choices, data directories, and other relevant settings. To help the user, two sample configuration files have been included in the ``ush`` directory: ``config.community.yaml`` and ``config.nco.yaml``. The first is for running experiments in *community* mode (``RUN_ENVIR`` set to "community"), and the second is for running experiments in *nco* mode (``RUN_ENVIR`` set to "nco"). 
The content of these files can be copied into ``config.yaml`` and used as the starting point from which to generate a variety of experiment configurations for the SRW App. Note that for this release, only *community* mode is supported. -There is an extensive list of experiment parameters that a user can set when configuring the experiment. Not all of these need to be explicitly set by the user in ``config.sh``. If a user does not define an entry in the ``config.sh`` script, either its value in ``config_defaults.sh`` will be used, or it will be reset depending on other parameters, such as the platform on which the experiment will be run (specified by ``MACHINE``). Note that ``config_defaults.sh`` contains the full list of experiment parameters that a user may set in ``config.sh`` (i.e., the user cannot set parameters in ``config.sh`` that are not initialized in ``config_defaults.sh``). +There is an extensive list of experiment parameters that a user can set when configuring the experiment. Not all of these parameters need to be set explicitly by the user in ``config.yaml``. If a user does not define a variable in the ``config.yaml`` script, its value in ``config_defaults.yaml`` will be used, or the value will be reset depending on other parameters, such as the platform (``MACHINE``) selected for the experiment. -The following is a list of the parameters in the ``config_defaults.sh`` file. For each parameter, the default value and a brief description is given. +.. note:: + The ``config_defaults.yaml`` file contains the full list of experiment parameters that a user may set in ``config.yaml``. The user cannot set parameters in ``config.yaml`` that are not initialized in ``config_defaults.yaml``. -.. _PlatEnv: +The following is a list of the parameters in the ``config_defaults.yaml`` file. For each parameter, the default value and a brief description is provided. + +.. _user: + +USER Configuration Parameters +================================= + +If non-default parameters are selected for the variables in this section, they should be added to the ``user:`` section of the ``config.yaml`` file. -Platform Environment -==================== ``RUN_ENVIR``: (Default: "nco") This variable determines the workflow mode. The user can choose between two options: "nco" and "community". The "nco" mode uses a directory structure that mimics what is used in operations at NOAA/NCEP Central Operations (NCO) and at the NOAA/NCEP/Environmental Modeling Center (EMC), which works with NCO on pre-implementation testing. Specifics of the conventions used in "nco" mode can be found in the following `WCOSS Implementation Standards `__ document: @@ -21,34 +27,38 @@ Platform Environment | January 19, 2022 | Version 11.0.0 - Setting ``RUN_ENVIR`` to "community" is recommended in most cases for users who are not planning to implement their code into operations at NCO. + Setting ``RUN_ENVIR`` to "community" is recommended in most cases for users who are not planning to implement their code into operations at NCO. Valid values: ``"nco"`` | ``"community"`` ``MACHINE``: (Default: "BIG_COMPUTER") - The machine (a.k.a. platform or system) on which the workflow will run. Currently supported platforms are listed on the `SRW App Wiki page `__. When running the SRW App on any ParellelWorks/NOAA Cloud system, use "NOAACLOUD" regardless of the underlying system (AWS, GCP, or Azure). When running the SRW App in a container, set ``MACHINE`` to "SINGULARITY" regardless of the underlying platform (including on NOAA Cloud systems). 
Valid values: ``"HERA"`` | ``"ORION"`` | ``"JET"`` | ``"CHEYENNE"`` | ``"GAEA"`` | ``"NOAACLOUD"`` | ``"STAMPEDE"`` | ``"ODIN"`` | ``"MACOS"`` | ``"LINUX"`` | ``"SINGULARITY"`` | ``"WCOSS2"`` + The machine (a.k.a. platform or system) on which the workflow will run. Currently supported platforms are listed on the `SRW App Wiki page `__. When running the SRW App on any ParellelWorks/NOAA Cloud system, use "NOAACLOUD" regardless of the underlying system (AWS, GCP, or Azure). Valid values: ``"HERA"`` | ``"ORION"`` | ``"JET"`` | ``"CHEYENNE"`` | ``"GAEA"`` | ``"NOAACLOUD"`` | ``"STAMPEDE"`` | ``"ODIN"`` | ``"MACOS"`` | ``"LINUX"`` | ``"SINGULARITY"`` | ``"WCOSS2"`` + + .. hint:: + Users who are NOT on a named, supported Level 1 or 2 platform will need to set the ``MACHINE`` variable to ``LINUX`` or ``MACOS``; to combine use of a Linux or MacOS platform with the Rocoto workflow manager, users will also need to set ``WORKFLOW_MANAGER: "rocoto"`` in the ``platform:`` section of ``config.yaml``. This combination will assume a Slurm batch manager when generating the XML. ``MACHINE_FILE``: (Default: "") - Path to a configuration file with machine-specific settings. If none is provided, ``setup.sh`` will attempt to set the path to a configuration file for a supported platform. + Path to a configuration file with machine-specific settings. If none is provided, ``setup.py`` will attempt to set the path to a configuration file for a supported platform. ``ACCOUNT``: (Default: "project_name") The account under which users submit jobs to the queue on the specified ``MACHINE``. To determine an appropriate ``ACCOUNT`` field for `Level 1 `__ systems, users may run the ``groups`` command, which will return a list of projects that the user has permissions for. Not all of the listed projects/groups have an HPC allocation, but those that do are potentially valid account names. On some systems, the ``saccount_params`` command will display additional account details. -``COMPILER``: (Default: "intel") - Type of compiler invoked during the build step. Currently, this must be set manually (i.e., it is not inherited from the build system in the ``ufs-srweather-app`` directory). Valid values: ``"intel"`` | ``"gnu"`` +.. _PlatformConfig: + +PLATFORM Configuration Parameters +===================================== + +If non-default parameters are selected for the variables in this section, they should be added to the ``platform:`` section of the ``config.yaml`` file. ``WORKFLOW_MANAGER``: (Default: "none") - The workflow manager to use (e.g., "ROCOTO"). This is set to "none" by default, but if the machine name is set to a platform that supports Rocoto, this will be overwritten and set to "ROCOTO." Valid values: ``"rocoto"`` | ``"none"`` + The workflow manager to use (e.g., "rocoto"). This is set to "none" by default, but if the machine name is set to a platform that supports Rocoto, this will be overwritten and set to "rocoto." If set explicitly to "rocoto" along with the use of the ``MACHINE: "LINUX"`` target, the configuration layer assumes a Slurm batch manager when generating the XML. Valid values: ``"rocoto"`` | ``"none"`` ``NCORES_PER_NODE``: (Default: "") - The number of cores available per node on the compute platform. Set for supported platforms in ``setup.sh``, but it is now also configurable for all platforms. - -``LMOD_PATH``: (Default: "") - Path to the LMOD shell file on the user's Linux system. It is set automatically for supported machines. + The number of cores available per node on the compute platform. 
Set for supported platforms in ``setup.py``, but it is now also configurable for all platforms. ``BUILD_MOD_FN``: (Default: "") - Name of alternative build module file to use if running on an unsupported platform. Is set automatically for supported machines. + Name of an alternative build module file to use if running on an unsupported platform. It is set automatically for supported machines. ``WFLOW_MOD_FN``: (Default: "") - Name of alternative workflow module file to use if running on an unsupported platform. Is set automatically for supported machines. + Name of an alternative workflow module file to use if running on an unsupported platform. It is set automatically for supported machines. ``BUILD_VER_FN``: (Default: "") File name containing the version of the modules used for building the app. Currently, WCOSS2 only uses this file. @@ -61,140 +71,180 @@ Platform Environment ``SCHED``: (Default: "") The job scheduler to use (e.g., Slurm) on the specified ``MACHINE``. Leaving this an empty string allows the experiment generation script to set it automatically depending on the machine the workflow is running on. Valid values: ``"slurm"`` | ``"pbspro"`` | ``"lsf"`` | ``"lsfcray"`` | ``"none"`` -Machine-Dependent Parameters: +``SCHED_NATIVE_CMD``: (Default: "") + Allows an extra parameter to be passed to the job scheduler (Slurm or PBSPRO) via XML Native command. + +``DOMAIN_PREGEN_BASEDIR``: (Default: "") + For use in NCO mode only (``RUN_ENVIR: "nco"``). The base directory containing pregenerated grid, orography, and surface climatology files. This is an alternative for setting ``GRID_DIR``, ``OROG_DIR``, and ``SFC_CLIMO_DIR`` individually. For the pregenerated grid specified by ``PREDEF_GRID_NAME``, these "fixed" files are located in: + + .. code-block:: console + + ${DOMAIN_PREGEN_BASEDIR}/${PREDEF_GRID_NAME} + + The workflow scripts will create a symlink in the experiment directory that will point to a subdirectory (having the same name as the experiment grid) under this directory. This variable should be set to a null string in ``config_defaults.yaml``, but it can be changed in the user-specified workflow configuration file set by ``EXPT_CONFIG_FN`` (usually ``config.yaml``). + +``PRE_TASK_CMDS``: (Default: "") + Pre-task commands such as ``ulimit`` needed by tasks. For example: ``'{ ulimit -s unlimited; ulimit -a; }'`` + +Machine-Dependent Parameters ------------------------------- -These parameters vary depending on machine. On `Level 1 and 2 `__ systems, the appropriate values for each machine can be viewed in the ``regional_workflow/ush/machine/.sh`` scripts. To specify a value other than the default, add these variables and the desired value in the ``config.sh`` file so that they override the ``config_defaults.sh`` and machine default values. +These parameters vary depending on machine. On `Level 1 and 2 `__ systems, the appropriate values for each machine can be viewed in the ``ush/machine/.sh`` scripts. To specify a value other than the default, add these variables and the desired value in the ``config.yaml`` file so that they override the ``config_defaults.yaml`` and machine default values. ``PARTITION_DEFAULT``: (Default: "") - This variable is only used with the Slurm job scheduler (i.e., if ``SCHED`` is set to "slurm"). This is the default partition to which Slurm submits workflow tasks. 
When a variable that designates the partition (e.g., ``PARTITION_HPSS``, ``PARTITION_FCST``; see below) is **not** specified, the task will be submitted to the default partition indicated in the ``PARTITION_DEFAULT`` variable. If this value is not set or is set to an empty string, it will be (re)set to a machine-dependent value. Valid values: ``""`` | ``"hera"`` | ``"normal"`` | ``"orion"`` | ``"sjet,vjet,kjet,xjet"`` | ``"workq"`` - -``CLUSTERS_DEFAULT``: (Default: "") - This variable is only used with the Slurm job scheduler (i.e., if ``SCHED`` is set to "slurm"). These are the default clusters to which Slurm submits workflow tasks. If ``CLUSTERS_HPSS`` or ``CLUSTERS_FCST`` (see below) are not specified, the task will be submitted to the default clusters indicated in this variable. If this value is not set or is set to an empty string, it will be (re)set to a machine-dependent value. + This variable is only used with the Slurm job scheduler (i.e., when ``SCHED: "slurm"``). This is the default partition to which Slurm submits workflow tasks. When a variable that designates the partition (e.g., ``PARTITION_HPSS``, ``PARTITION_FCST``; see below) is **not** specified, the task will be submitted to the default partition indicated in the ``PARTITION_DEFAULT`` variable. If this value is not set or is set to an empty string, it will be (re)set to a machine-dependent value. Options are machine-dependent and include: ``""`` | ``"hera"`` | ``"normal"`` | ``"orion"`` | ``"sjet"`` | ``"vjet"`` | ``"kjet"`` | ``"xjet"`` | ``"workq"`` ``QUEUE_DEFAULT``: (Default: "") - The default queue or QOS to which workflow tasks are submitted (QOS is Slurm's term for queue; it stands for "Quality of Service"). If the task's ``QUEUE_HPSS`` or ``QUEUE_FCST`` parameters (see below) are not specified, the task will be submitted to the queue indicated by this variable. If this value is not set or is set to an empty string, it will be (re)set to a machine-dependent value. Valid values: ``""`` | ``"batch"`` | ``"dev"`` | ``"normal"`` | ``"regular"`` | ``"workq"`` + The default queue or QOS to which workflow tasks are submitted (QOS is Slurm's term for queue; it stands for "Quality of Service"). If the task's ``QUEUE_HPSS`` or ``QUEUE_FCST`` parameters (see below) are not specified, the task will be submitted to the queue indicated by this variable. If this value is not set or is set to an empty string, it will be (re)set to a machine-dependent value. Options are machine-dependent and include: ``""`` | ``"batch"`` | ``"dev"`` | ``"normal"`` | ``"regular"`` | ``"workq"`` ``PARTITION_HPSS``: (Default: "") - This variable is only used with the Slurm job scheduler (i.e., if ``SCHED`` is set to "slurm"). Tasks that get or create links to external model files are submitted to the partition specified in this variable. These links are needed to generate initial conditions (:term:`ICs`) and lateral boundary conditions (:term:`LBCs`) for the experiment. If this variable is not set or is set to an empty string, it will be (re)set to the ``PARTITION_DEFAULT`` value (if set) or to a machine-dependent value. Valid values: ``""`` | ``"normal"`` | ``"service"`` | ``"workq"`` - -``CLUSTERS_HPSS``: (Default: "") - This variable is only used with the Slurm job scheduler (i.e., if ``SCHED`` is set to "slurm"). Tasks that get or create links to external model files are submitted to the clusters specified in this variable. These links are needed to generate initial conditions (ICs) and lateral boundary conditions (LBCs) for the experiment. 
If this variable is not set or is set to an empty string, it will be (re)set to a machine-dependent value. + This variable is only used with the Slurm job scheduler (i.e., when ``SCHED: "slurm"``). Tasks that get or create links to external model files are submitted to the partition specified in this variable. These links are needed to generate initial conditions (:term:`ICs`) and lateral boundary conditions (:term:`LBCs`) for the experiment. If this variable is not set or is set to an empty string, it will be (re)set to the ``PARTITION_DEFAULT`` value (if set) or to a machine-dependent value. Options are machine-dependent and include: ``""`` | ``"normal"`` | ``"service"`` | ``"workq"`` ``QUEUE_HPSS``: (Default: "") - Tasks that get or create links to external model files are submitted to this queue, or QOS (QOS is Slurm's term for queue; it stands for "Quality of Service"). If this value is not set or is set to an empty string, it will be (re)set to a machine-dependent value. Valid values: ``""`` | ``"batch"`` | ``"dev_transfer"`` | ``"normal"`` | ``"regular"`` | ``"workq"`` + Tasks that get or create links to external model files are submitted to this queue, or QOS (QOS is Slurm's term for queue; it stands for "Quality of Service"). These links are needed to generate initial conditions (:term:`ICs`) and lateral boundary conditions (:term:`LBCs`) for the experiment. If this value is not set or is set to an empty string, it will be (re)set to a machine-dependent value. Options are machine-dependent and include: ``""`` | ``"batch"`` | ``"dev_transfer"`` | ``"normal"`` | ``"regular"`` | ``"workq"`` ``PARTITION_FCST``: (Default: "") - This variable is only used with the Slurm job scheduler (i.e., if ``SCHED`` is set to "slurm"). The task that runs forecasts is submitted to this partition. If this variable is not set or is set to an empty string, it will be (re)set to a machine-dependent value. Valid values: ``""`` | ``"hera"`` | ``"normal"`` | ``"orion"`` | ``"sjet,vjet,kjet,xjet"`` | ``"workq"`` - -``CLUSTERS_FCST``: (Default: "") - This variable is only used with the Slurm job scheduler (i.e., if ``SCHED`` is set to "slurm"). The task that runs forecasts is submitted to this cluster. If this variable is not set or is set to an empty string, it will be (re)set to a machine-dependent value. + This variable is only used with the Slurm job scheduler (i.e., when ``SCHED: "slurm"``). The task that runs forecasts is submitted to this partition. If this variable is not set or is set to an empty string, it will be (re)set to a machine-dependent value. Options are machine-dependent and include: ``""`` | ``"hera"`` | ``"normal"`` | ``"orion"`` | ``"sjet"`` | ``"vjet"`` | ``"kjet"`` | ``"xjet"`` | ``"workq"`` ``QUEUE_FCST``: (Default: "") - The task that runs a forecast is submitted to this queue, or QOS (QOS is Slurm's term for queue; it stands for "Quality of Service"). If this variable is not set or set to an empty string, it will be (re)set to a machine-dependent value. Valid values: ``""`` | ``"batch"`` | ``"dev"`` | ``"normal"`` | ``"regular"`` | ``"workq"`` + The task that runs a forecast is submitted to this queue, or QOS (QOS is Slurm's term for queue; it stands for "Quality of Service"). If this variable is not set or set to an empty string, it will be (re)set to a machine-dependent value. 
Options are machine-dependent and include: ``""`` | ``"batch"`` | ``"dev"`` | ``"normal"`` | ``"regular"`` | ``"workq"`` Parameters for Running Without a Workflow Manager -================================================= -These settings control run commands for platforms without a workflow manager. Values will be ignored unless ``WORKFLOW_MANAGER="none"``. +----------------------------------------------------- +These settings set run commands for platforms without a workflow manager. Values will be ignored unless ``WORKFLOW_MANAGER: "none"``. ``RUN_CMD_UTILS``: (Default: "mpirun -np 1") The run command for MPI-enabled pre-processing utilities (e.g., shave, orog, sfc_climo_gen). This can be left blank for smaller domains, in which case the executables will run without :term:`MPI`. Users may need to use a different command for launching an MPI-enabled executable depending on their machine and MPI installation. -``RUN_CMD_FCST``: (Default: "mpirun -np \${PE_MEMBER01}") - The run command for the model forecast step. This will be appended to the end of the variable definitions file (``var_defns.sh``). Changing the ``${PE_MEMBER01}`` variable is **not** recommended; it refers to the number of MPI tasks that the Weather Model will expect to run with. Running the Weather Model with a different number of MPI tasks than the workflow has been set up for can lead to segmentation faults and other errors. It is also important to escape the ``$`` character or use single quotes here so that ``PE_MEMBER01`` is not referenced until runtime, since it is not defined at the beginning of the workflow generation script. +``RUN_CMD_FCST``: (Default: "mpirun -np ${PE_MEMBER01}") + The run command for the model forecast step. This will be appended to the end of the variable definitions file (``var_defns.sh``). Changing the ``${PE_MEMBER01}`` variable is **not** recommended; it refers to the number of MPI tasks that the Weather Model will expect to run with. Running the Weather Model with a different number of MPI tasks than the workflow has been set up for can lead to segmentation faults and other errors. ``RUN_CMD_POST``: (Default: "mpirun -np 1") The run command for post-processing (via the :term:`UPP`). Can be left blank for smaller domains, in which case UPP will run without :term:`MPI`. -.. _Cron: -Cron-Associated Parameters -========================== +METplus Parameters +---------------------- -Cron is a job scheduler accessed through the command-line on UNIX-like operating systems. It is useful for automating tasks such as the ``rocotorun`` command, which launches each workflow task in the SRW App. Cron periodically checks a cron table (aka crontab) to see if any tasks are are ready to execute. If so, it runs them. +:ref:`METplus ` is a scientific verification framework that spans a wide range of temporal and spatial scales. Many of the METplus parameters are described below, but additional documentation for the METplus components is available on the `METplus website `__. -``USE_CRON_TO_RELAUNCH``: (Default: "FALSE") - Flag that determines whether or not a line is added to the user's cron table, which calls the experiment launch script every ``CRON_RELAUNCH_INTVL_MNTS`` minutes. +``MODEL``: (Default: "") + A descriptive name of the user's choice for the model being verified. + +``MET_INSTALL_DIR``: (Default: "") + Path to top-level directory of MET installation. 
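For orientation, a sketch of how these METplus-related entries might be grouped under the ``platform:`` section of ``config.yaml`` is shown below. The installation paths, observation directories, and the ``MODEL`` name are placeholders rather than defaults, and the remaining parameters (``METPLUS_PATH``, ``MET_BIN_EXEC``, and the observation directories) are described in the entries that follow.

.. code-block:: yaml

   platform:
     MODEL: "FV3_GFS_v16_CONUS_25km"         # placeholder name for the model being verified
     MET_INSTALL_DIR: "/path/to/met"         # placeholder path to the MET installation
     METPLUS_PATH: "/path/to/METplus"        # placeholder path to the METplus installation (see below)
     MET_BIN_EXEC: "bin"                     # assumed executables subdirectory; described below
     CCPA_OBS_DIR: "/path/to/obs/ccpa/proc"  # placeholder observation directories; described below
     MRMS_OBS_DIR: "/path/to/obs/mrms/proc"
     NDAS_OBS_DIR: "/path/to/obs/ndas/proc"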
-``CRON_RELAUNCH_INTVL_MNTS``: (Default: "03") - The interval (in minutes) between successive calls of the experiment launch script by a cron job to (re)launch the experiment (so that the workflow for the experiment kicks off where it left off). This is used only if ``USE_CRON_TO_RELAUNCH`` is set to "TRUE". +``METPLUS_PATH``: (Default: "") + Path to top-level directory of METplus installation. -.. _DirParams: +``MET_BIN_EXEC``: (Default: "") + Name of subdirectory where METplus executables are installed. -Directory Parameters -==================== -``EXPT_BASEDIR``: (Default: "") - The full path to the base directory inside of which the experiment directory (``EXPT_SUBDIR``) will be created. If this is not specified or if it is set to an empty string, it will default to ``${HOMEdir}/../../expt_dirs``, where ``${HOMEdir}`` contains the full path to the ``regional_workflow`` directory. +.. _METParamNote: -``EXPT_SUBDIR``: (Default: "") - A descriptive name of the user's choice for the experiment directory (*not* its full path). The full path to the experiment directory, which will be contained in the variable ``EXPTDIR``, will be: +.. note:: + Where a date field is required: + * ``YYYY`` refers to the 4-digit valid year + * ``MM`` refers to the 2-digit valid month + * ``DD`` refers to the 2-digit valid day of the month + * ``HH`` refers to the 2-digit valid hour of the day + * ``mm`` refers to the 2-digit valid minutes of the hour + * ``SS`` refers to the two-digit valid seconds of the hour - .. code-block:: console +``CCPA_OBS_DIR``: (Default: "") + User-specified location of top-level directory where CCPA hourly precipitation files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``get_obs_ccpa_tn`` task. (This task is activated in the workflow by setting ``RUN_TASK_GET_OBS_CCPA: true``). - EXPTDIR="${EXPT_BASEDIR}/${EXPT_SUBDIR}" + METplus configuration files require the use of a predetermined directory structure and file names. If the CCPA files are user-provided, they need to follow the anticipated naming structure: ``{YYYYMMDD}/ccpa.t{HH}z.01h.hrap.conus.gb2``, where YYYYMMDD and HH are as described in the note :ref:`above `. When pulling observations from NOAA HPSS, the data retrieved will be placed in the ``CCPA_OBS_DIR`` directory. This path must be defined as ``//ccpa/proc``. METplus is configured to verify 01-, 03-, 06-, and 24-h accumulated precipitation using hourly CCPA files. - This parameter cannot be left as a null string. + .. note:: + There is a problem with the valid time in the metadata for files valid from 19 - 00 UTC (i.e., files under the "00" directory). The script to pull the CCPA data from the NOAA HPSS (``scripts/exregional_get_obs_ccpa.sh``) has an example of how to account for this and organize the data into a more intuitive format. When a fix is provided, it will be accounted for in the ``exregional_get_obs_ccpa.sh`` script. -``EXEC_SUBDIR``: (Default: "bin") - The name of the subdirectory of ``ufs-srweather-app`` where executables are installed. +``MRMS_OBS_DIR``: (Default: "") + User-specified location of top-level directory where MRMS composite reflectivity files used by METplus are located.
This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``get_obs_mrms_tn`` task (activated in the workflow by setting ``RUN_TASK_GET_OBS_MRMS: true``). When pulling observations directly from NOAA HPSS, the data retrieved will be placed in this directory. Please note, this path must be defined as ``//mrms/proc``. + + METplus configuration files require the use of a predetermined directory structure and file names. Therefore, if the MRMS files are user-provided, they need to follow the anticipated naming structure: ``{YYYYMMDD}/MergedReflectivityQCComposite_00.50_{YYYYMMDD}-{HH}{mm}{SS}.grib2``, where YYYYMMDD and {HH}{mm}{SS} are as described in the note :ref:`above `. -.. _NCOModeParms: +.. note:: + METplus is configured to look for a MRMS composite reflectivity file for the valid time of the forecast being verified; since MRMS composite reflectivity files do not always exactly match the valid time, a script (within the main script that retrieves MRMS data from the NOAA HPSS) is used to identify and rename the MRMS composite reflectivity file to match the valid time of the forecast. The script to pull the MRMS data from the NOAA HPSS has an example of the expected file-naming structure: ``scripts/exregional_get_obs_mrms.sh``. This script calls the script used to identify the MRMS file closest to the valid time: ``ush/mrms_pull_topofhour.py``. -NCO Mode Parameters -=================== -These variables apply only when using NCO mode (i.e., when ``RUN_ENVIR`` is set to "nco"). +``NDAS_OBS_DIR``: (Default: "") + User-specified location of the top-level directory where NDAS prepbufr files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``get_obs_ndas_tn`` task (activated in the workflow by setting ``RUN_TASK_GET_OBS_NDAS: true``). When pulling observations directly from NOAA HPSS, the data retrieved will be placed in this directory. Please note, this path must be defined as ``//ndas/proc``. METplus is configured to verify near-surface variables hourly and upper-air variables at 00 and 12 UTC with NDAS prepbufr files. + + METplus configuration files require the use of predetermined file names. Therefore, if the NDAS files are user-provided, they need to follow the anticipated naming structure: ``prepbufr.ndas.{YYYYMMDDHH}``, where YYYYMMDDHH is as described in the note :ref:`above `. The script to pull the NDAS data from the NOAA HPSS (``scripts/exregional_get_obs_ndas.sh``) has an example of how to rename the NDAS data into a more intuitive format with the valid time listed in the file name. -``COMINgfs``: (Default: "/base/path/of/directory/containing/gfs/input/files") - The beginning portion of the path to the directory that contains files generated by the external model (FV3GFS). The initial and lateral boundary condition generation tasks need this path in order to create initial and boundary condition files for a given cycle on the native FV3-LAM grid. For a cycle that starts on the date specified by the variable YYYYMMDD (consisting of the 4-digit year, 2-digit month, and 2-digit day of the month) and the hour specified by the variable HH (consisting of the 2-digit hour of the day), the directory in which the workflow will look for the external model files is: +Test Directories +---------------------- - ..
code-block:: console +These directories are used only by the ``run_WE2E_tests.sh`` script, so they are not used unless the user runs a Workflow End-to-End (WE2E) test. Their function corresponds to the same variables without the ``TEST_`` prefix. Users typically should not modify these variables. For any alterations, the logic in the ``run_WE2E_tests.sh`` script would need to be adjusted accordingly. - $COMINgfs/gfs.$yyyymmdd/$hh/atmos +``TEST_EXTRN_MDL_SOURCE_BASEDIR``: (Default: "") + This parameter allows testing of user-staged files in a known location on a given platform. This path contains a limited dataset and likely will not be useful for most user experiments. -``FIXLAM_NCO_BASEDIR``: (Default: "") - The base directory containing pregenerated grid, orography, and surface climatology files. For the pregenerated grid type specified in the variable ``PREDEF_GRID_NAME``, these "fixed" files are located in: +``TEST_PREGEN_BASEDIR``: (Default: "") + Similar to ``DOMAIN_PREGEN_BASEDIR``, this variable sets the base directory containing pregenerated grid, orography, and surface climatology files for WE2E tests. This is an alternative for setting ``GRID_DIR``, ``OROG_DIR``, and ``SFC_CLIMO_DIR`` individually. - .. code-block:: console +``TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS``: (Default: "") + This parameter is used to test the mechanism that allows users to point to a data stream on disk. It sets up a sandbox location that mimics the stream in a more controlled way and tests the ability to access :term:`ICS`. - ${FIXLAM_NCO_BASEDIR}/${PREDEF_GRID_NAME} +``TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS``: (Default: "") + This parameter is used to test the mechanism that allows users to point to a data stream on disk. It sets up a sandbox location that mimics the stream in a more controlled way and tests the ability to access :term:`LBCS`. - The workflow scripts will create a symlink in the experiment directory that will point to a subdirectory (having the name of the grid being used) under this directory. This variable should be set to a null string in ``config_defaults.sh`` and specified by the user in the workflow configuration file (``config.sh``). -``STMP``: (Default: "/base/path/of/directory/containing/model/input/and/raw/output/files") - The beginning portion of the path to the directory that will contain :term:`cycle-dependent` model input files, symlinks to :term:`cycle-independent` input files, and raw (i.e., before post-processing) forecast output files for a given :term:`cycle`. The format for cycle dates (cdate) is ``cdate="${YYYYMMDD}${HH}"``, where the date is specified using YYYYMMDD format, and the hour is specified using HH format. The files for a cycle date will be located in the following directory: +.. _workflow: - .. code-block:: console +WORKFLOW Configuration Parameters +===================================== - $STMP/tmpnwprd/$RUN/$cdate +If non-default parameters are selected for the variables in this section, they should be added to the ``workflow:`` section of the ``config.yaml`` file. -``NET, envir, RUN``: - Variables used in forming the path to the directory that will contain the post-processor (:term:`UPP`) output files for a given cycle (see ``PTMP`` below). These are defined in the `WCOSS Implementation Standards `__ document (pp. 4-5, 19-20) as follows: +.. 
_Cron: - ``NET``: (Default: "rrfs") - Model name (first level of ``com`` directory structure) +Cron-Associated Parameters +------------------------------ - ``envir``: (Default: "para") - Set to "test" during the initial testing phase, "para" when running in parallel (on a schedule), and "prod" in production. (Second level of ``com`` directory structure.) +Cron is a job scheduler accessed through the command-line on UNIX-like operating systems. It is useful for automating tasks such as the ``rocotorun`` command, which launches each workflow task in the SRW App. Cron periodically checks a cron table (aka crontab) to see if any tasks are ready to execute. If so, it runs them. - ``RUN``: (Default: "experiment_name") - Name of model run (third level of ``com`` directory structure). +``USE_CRON_TO_RELAUNCH``: (Default: false) + Flag that determines whether or not a line is added to the user's cron table, which calls the experiment launch script every ``CRON_RELAUNCH_INTVL_MNTS`` minutes. Valid values: ``True`` | ``False`` + +``CRON_RELAUNCH_INTVL_MNTS``: (Default: 3) + The interval (in minutes) between successive calls of the experiment launch script by a cron job to (re)launch the experiment (so that the workflow for the experiment kicks off where it left off). This is used only if ``USE_CRON_TO_RELAUNCH`` is set to true. + +.. _DirParams: + +Directory Parameters +----------------------- + +``EXPT_BASEDIR``: (Default: "") + The full path to the base directory in which the experiment directory (``EXPT_SUBDIR``) will be created. If this is not specified or if it is set to an empty string, it will default to ``${HOMEdir}/../expt_dirs``, where ``${HOMEdir}`` contains the full path to the ``ufs-srweather-app`` directory. + +``EXPT_SUBDIR``: (Default: "") + The user-designated name of the experiment directory (*not* its full path). The full path to the experiment directory, which will be contained in the variable ``EXPTDIR``, will be: -``PTMP``: (Default: "/base/path/of/directory/containing/postprocessed/output/files") - The beginning portion of the path to the directory that will contain the output files from the post-processor (:term:`UPP`) for a given cycle. For a cycle that starts on the date specified by YYYYMMDD and hour specified by HH (where YYYYMMDD and HH are as described above), the UPP output files will be placed in the following directory: - .. code-block:: console - $PTMP/com/$NET/$envir/$RUN.$yyyymmdd/$hh + EXPTDIR="${EXPT_BASEDIR}/${EXPT_SUBDIR}" + + This parameter cannot be left as a null string. It must be set to a non-null value in the user-defined experiment configuration file (i.e., ``config.yaml``). + +``EXEC_SUBDIR``: (Default: "exec") + The name of the subdirectory of ``ufs-srweather-app`` where executables are installed. Pre-Processing File Separator Parameters -======================================== +-------------------------------------------- + ``DOT_OR_USCORE``: (Default: "_") This variable sets the separator character(s) to use in the names of the grid, mosaic, and orography fixed files. Ideally, the same separator should be used in the names of these fixed files as in the surface climatology fixed files. Valid values: ``"_"`` | ``"."`` -File Name Parameters -==================== -``EXPT_CONFIG_FN``: (Default: "config.sh") + +Set File Name Parameters +---------------------------- + +``EXPT_CONFIG_FN``: (Default: "config.yaml") Name of the user-specified configuration file for the forecast experiment.
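As an illustrative sketch (not a tested configuration), the cron and directory parameters described above might appear together in the ``workflow:`` section of ``config.yaml`` as follows; the experiment name is a placeholder chosen for this example.

.. code-block:: yaml

   workflow:
     USE_CRON_TO_RELAUNCH: true    # add a crontab entry that calls the launch script
     CRON_RELAUNCH_INTVL_MNTS: 3   # minutes between successive relaunch attempts
     EXPT_SUBDIR: "my_experiment"  # placeholder experiment name; must be non-null
     # EXPT_BASEDIR is left unset here, so EXPTDIR defaults to
     # ${HOMEdir}/../expt_dirs/my_experiment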
+``CONSTANTS_FN``: (Default: "constants.yaml") + Name of the file containing definitions of various mathematical, physical, and SRW App contants. + ``RGNL_GRID_NML_FN``: (Default: "regional_grid.nml") Name of the file containing namelist settings for the code that generates an "ESGgrid" regional grid. @@ -207,23 +257,23 @@ File Name Parameters ``FV3_NML_BASE_ENS_FN``: (Default: "input.nml.base_ens") Name of the Fortran file containing the forecast model's base ensemble namelist (i.e., the original namelist file from which each of the ensemble members' namelist files is generated). -``DIAG_TABLE_FN``: (Default: "diag_table") - Name of the file specifying the fields that the forecast model will output. +``FV3_EXEC_FN``: (Default: "ufs_model") + Name to use for the forecast model executable. -``FIELD_TABLE_FN``: (Default: "field_table") - Name of the file specifying the :term:`tracers ` that the forecast model will read in from the :term:`IC/LBC ` files. +``DIAG_TABLE_TMPL_FN``: (Default: "") + Name of a template file that specifies the output fields of the forecast model. The selected physics suite is appended to this file name in ``setup.py``, taking the form ``{DIAG_TABLE_TMPL_FN}.{CCPP_PHYS_SUITE}``. Generally, the SRW App expects to read in the default value set in ``setup.py`` (i.e., ``diag_table.{CCPP_PHYS_SUITE}``), and users should **not** specify a value for ``DIAG_TABLE_TMPL_FN`` in their configuration file (i.e., ``config.yaml``) unless (1) the file name required by the model changes, and (2) they also change the names of the ``diag_table`` options in the ``ufs-srweather-app/parm`` directory. -``DATA_TABLE_FN``: (Default: "data_table") - Name of the file containing the data table read in by the forecast model. +``FIELD_TABLE_TMPL_FN``: (Default: "") + Name of a template file that specifies the :term:`tracers ` that the forecast model will read in from the :term:`IC/LBC ` files. The selected physics suite is appended to this file name in ``setup.py``, taking the form ``{FIELD_TABLE_TMPL_FN}.{CCPP_PHYS_SUITE}``. Generally, the SRW App expects to read in the default value set in ``setup.py`` (i.e., ``field_table.{CCPP_PHYS_SUITE}``), and users should **not** specify a different value for ``FIELD_TABLE_TMPL_FN`` in their configuration file (i.e., ``config.yaml``) unless (1) the file name required by the model changes, and (2) they also change the names of the ``field_table`` options in the ``ufs-srweather-app/parm`` directory. -``MODEL_CONFIG_FN``: (Default: "model_configure") - Name of the file containing settings and configurations for the :term:`NUOPC`/:term:`ESMF` component. +``DATA_TABLE_TMPL_FN``: (Default: "") + Name of a template file that contains the data table read in by the forecast model. Generally, the SRW App expects to read in the default value set in ``setup.py`` (i.e., ``data_table``), and users should **not** specify a different value for ``DATA_TABLE_TMPL_FN`` in their configuration file (i.e., ``config.yaml``) unless (1) the file name required by the model changes, and (2) they also change the name of ``data_table`` in the ``ufs-srweather-app/parm`` directory. -``NEMS_CONFIG_FN``: (Default: "nems.configure") - Name of the file containing information about the various :term:`NEMS` components and their run sequence. +``MODEL_CONFIG_TMPL_FN``: (Default: "") + Name of a template file that contains settings and configurations for the :term:`NUOPC`/:term:`ESMF` main component. 
Generally, the SRW App expects to read in the default value set in ``setup.py`` (i.e., ``model_configure``), and users should **not** specify a different value for ``MODEL_CONFIG_TMPL_FN`` in their configuration file (i.e., ``config.yaml``) unless (1) the file name required by the model changes, and (2) they also change the name of ``model_configure`` in the ``ufs-srweather-app/parm`` directory. -``FV3_EXEC_FN``: (Default: "ufs_model") - Name of the forecast model executable stored in the executables directory (``EXECDIR``; set during experiment generation). +``NEMS_CONFIG_TMPL_FN``: (Default: "") + Name of a template file that contains information about the various :term:`NEMS` components and their run sequence. Generally, the SRW App expects to read in the default value set in ``setup.py`` (i.e., ``nems.configure``), and users should **not** specify a different value for ``NEMS_CONFIG_TMPL_FN`` in their configuration file (i.e., ``config.yaml``) unless (1) the file name required by the model changes, and (2) they also change the name of ``nems.configure`` in the ``ufs-srweather-app/parm`` directory. ``FCST_MODEL``: (Default: "ufs-weather-model") Name of forecast model. Valid values: ``"ufs-weather-model"`` | ``"fv3gfs_aqm"`` @@ -232,13 +282,10 @@ File Name Parameters Name of the Rocoto workflow XML file that the experiment generation script creates. This file defines the workflow for the experiment. ``GLOBAL_VAR_DEFNS_FN``: (Default: "var_defns.sh") - Name of the file (a shell script) containing definitions of the primary and secondary experiment variables (parameters). This file is sourced by many scripts (e.g., the J-job scripts corresponding to each workflow task) in order to make all the experiment variables available in those scripts. The primary variables are defined in the default configuration script (``config_defaults.sh``) and in ``config.sh``. The secondary experiment variables are generated by the experiment generation script. + Name of the file (a shell script) containing definitions of the primary and secondary experiment variables (parameters). This file is sourced by many scripts (e.g., the J-job scripts corresponding to each workflow task) in order to make all the experiment variables available in those scripts. The primary variables are defined in the default configuration script (``config_defaults.yaml``) and in ``config.yaml``. The secondary experiment variables are generated by the experiment generation script. -``EXTRN_MDL_ICS_VAR_DEFNS_FN``: (Default: "extrn_mdl_ics_var_defns.sh") - Name of the file (a shell script) containing the definitions of variables associated with the external model from which :term:`ICs` are generated. This file is created by the ``GET_EXTRN_ICS_TN`` task because the values of the variables it contains are not known before this task runs. The file is then sourced by the ``MAKE_ICS_TN`` task. - -``EXTRN_MDL_LBCS_VAR_DEFNS_FN``: (Default: "extrn_mdl_lbcs_var_defns.sh") - Name of the file (a shell script) containing the definitions of variables associated with the external model from which :term:`LBCs` are generated. This file is created by the ``GET_EXTRN_LBCS_TN`` task because the values of the variables it contains are not known before this task runs. The file is then sourced by the ``MAKE_ICS_TN`` task. +``EXTRN_MDL_VAR_DEFNS_FN``: (Default: "extrn_mdl_var_defns") + Name of the file (a shell script) containing the definitions of variables associated with the external model from which :term:`ICs` or :term:`LBCs` are generated. 
This file is created by the ``GET_EXTRN_*_TN`` task because the values of the variables it contains are not known before this task runs. The file is then sourced by the ``MAKE_ICS_TN`` and ``MAKE_LBCS_TN`` tasks. ``WFLOW_LAUNCH_SCRIPT_FN``: (Default: "launch_FV3LAM_wflow.sh") Name of the script that can be used to (re)launch the experiment's Rocoto workflow. @@ -246,412 +293,257 @@ File Name Parameters ``WFLOW_LAUNCH_LOG_FN``: (Default: "log.launch_FV3LAM_wflow") Name of the log file that contains the output from successive calls to the workflow launch script (``WFLOW_LAUNCH_SCRIPT_FN``). -Forecast Parameters -=================== -``DATE_FIRST_CYCL``: (Default: "YYYYMMDDHH") - Starting date of the first forecast in the set of forecasts to run. Format is "YYYYMMDDHH". +.. _CCPP_Params: -``DATE_LAST_CYCL``: (Default: "YYYYMMDDHH") - Starting date of the last forecast in the set of forecasts to run. Format is "YYYYMMDDHH". +CCPP Parameter +------------------ -``INCR_CYCL_FREQ``: (Default: "24") - Increment in hours for cycle frequency (cycl_freq). The default is "24", which means cycl_freq=24:00:00. +``CCPP_PHYS_SUITE``: (Default: "FV3_GFS_v16") + This parameter indicates which :term:`CCPP` (Common Community Physics Package) physics suite to use for the forecast(s). The choice of physics suite determines the forecast model's namelist file, the diagnostics table file, the field table file, and the XML physics suite definition file, which are staged in the experiment directory or the :term:`cycle` directories under it. -``FCST_LEN_HRS``: (Default: "24") - The length of each forecast, in integer hours. + .. note:: + For information on *stochastic physics* parameters, see :numref:`Section %s `. + + **Current supported settings for the CCPP parameter are:** -Model Configuration Parameters -================================= + | ``"FV3_GFS_v16"`` + | ``"FV3_RRFS_v1beta"`` + | ``"FV3_HRRR"`` + | ``"FV3_WoFS_v0"`` -``DT_ATMOS``: (Default: "") - Time step for the outermost atmospheric model loop in seconds. This corresponds to the frequency at which the physics routines and the top level dynamics routine are called. (Note that one call to the top-level dynamics routine results in multiple calls to the horizontal dynamics, tracer transport, and vertical dynamics routines; see the `FV3 dycore scientific documentation `__ for details.) Must be set. Takes an integer value. In the SRW App, a default value for ``DT_ATMOS`` appears in the ``set_predef_grid_params.sh`` script, but a different value can be set in ``config.sh``. + **Other valid values include:** -``RESTART_INTERVAL``: (Default: "0") - Frequency of the output restart files in hours. Using the default interval ("0"), restart files are produced at the end of a forecast run. When ``RESTART_INTERVAL="1"``, restart files are produced every hour with the prefix "YYYYMMDD.HHmmSS." in the ``RESTART`` directory. + | ``"FV3_GFS_2017_gfdlmp"`` + | ``"FV3_GFS_2017_gfdlmp_regional"`` + | ``"FV3_GFS_v15p2"`` + | ``"FV3_GFS_v15_thompson_mynn_lam3km"`` -.. _InlinePost: -``WRITE_DOPOST``: (Default: "FALSE") - Flag that determines whether to use the INLINE POST option. If TRUE, the ``WRITE_DOPOST`` flag in the ``model_configure`` file will be set to "TRUE", and the post-processing tasks get called from within the weather model so that the post-processed files (in :term:`grib2` format) are output by the Weather Model at the same time that it outputs the ``dynf###.nc`` and ``phyf###.nc`` files. 
Setting ``WRITE_DOPOST="TRUE"`` turns off the separate ``run_post`` task (i.e., ``RUN_TASK_RUN_POST`` is set to "FALSE") in ``setup.sh``. +.. _GridGen: -METplus Parameters -===================== +Grid Generation Parameters +------------------------------ -:ref:`METplus ` is a scientific verification framework that spans a wide range of temporal and spatial scales. Many of the METplus parameters are described below, but additional documentation for the METplus components is available on the `METplus website `__. +``GRID_GEN_METHOD``: (Default: "") + This variable specifies which method to use to generate a regional grid in the horizontal plane. The values that it can take on are: -``MODEL``: (Default: "") - A descriptive name of the user's choice for the model being verified. - -``MET_INSTALL_DIR``: (Default: "") - Path to top-level directory of MET installation. + * ``"ESGgrid"``: The "ESGgrid" method will generate a regional version of the Extended Schmidt Gnomonic (ESG) grid using the map projection developed by Jim Purser of EMC (:cite:t:`Purser_2020`). "ESGgrid" is the preferred grid option. -``METPLUS_PATH``: (Default: "") - Path to top-level directory of METplus installation. + * ``"GFDLgrid"``: The "GFDLgrid" method first generates a "parent" global cubed-sphere grid. Then a portion from tile 6 of the global grid is used as the regional grid. This regional grid is referred to in the grid generation scripts as "tile 7," even though it does not correspond to a complete tile. The forecast is run only on the regional grid (i.e., on tile 7, not on tiles 1 through 6). Note that the "GFDLgrid" method is the legacy grid generation method. It is not supported in *all* predefined domains. -``MET_BIN_EXEC``: (Default: "bin") - Location where METplus executables are installed. +.. attention:: -.. _METParamNote: + If the experiment uses a **predefined grid** (i.e., if ``PREDEF_GRID_NAME`` is set to the name of a valid predefined grid), then ``GRID_GEN_METHOD`` will be reset to the value of ``GRID_GEN_METHOD`` for that grid. This will happen regardless of whether ``GRID_GEN_METHOD`` is assigned a value in the experiment configuration file; any value assigned will be overwritten. .. note:: - Where a date field is required: - * ``YYYY`` refers to the 4-digit valid year - * ``MM`` refers to the 2-digit valid month - * ``DD`` refers to the 2-digit valid day of the month - * ``HH`` refers to the 2-digit valid hour of the day - * ``mm`` refers to the 2-digit valid minutes of the hour - * ``SS`` refers to the two-digit valid seconds of the hour - -``CCPA_OBS_DIR``: (Default: "") - User-specified location of top-level directory where CCPA hourly precipitation files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``get_obs_ccpa_tn`` task. (This task is activated in the workflow by setting ``RUN_TASK_GET_OBS_CCPA="TRUE"``). - METplus configuration files require the use of a predetermined directory structure and file names. If the CCPA files are user-provided, they need to follow the anticipated naming structure: ``{YYYYMMDD}/ccpa.t{HH}z.01h.hrap.conus.gb2``, where YYYYMMDD and HH are as described in the note :ref:`above `. When pulling observations from NOAA HPSS, the data retrieved will be placed in the ``CCPA_OBS_DIR`` directory. This path must be defind as ``//ccpa/proc``. 
METplus is configured to verify 01-, 03-, 06-, and 24-h accumulated precipitation using hourly CCPA files. + If the experiment uses a **user-defined grid** (i.e., if ``PREDEF_GRID_NAME`` is set to a null string), then ``GRID_GEN_METHOD`` must be set in the experiment configuration file. Otherwise, the experiment generation will fail because the generation scripts check to ensure that the grid name is set to a non-empty string before creating the experiment directory. - .. note:: - There is a problem with the valid time in the metadata for files valid from 19 - 00 UTC (i.e., files under the "00" directory). The script to pull the CCPA data from the NOAA HPSS (``regional_workflow/scripts/exregional_get_ccpa_files.sh``) has an example of how to account for this and organize the data into a more intuitive format. When a fix is provided, it will be accounted for in the ``exregional_get_ccpa_files.sh`` script. +Forecast Parameters +---------------------- +``DATE_FIRST_CYCL``: (Default: "YYYYMMDDHH") + Starting date of the first forecast in the set of forecasts to run. Format is "YYYYMMDDHH". -``MRMS_OBS_DIR``: (Default: "") - User-specified location of top-level directory where MRMS composite reflectivity files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``get_obs_mrms_tn`` task (activated in the workflow by setting ``RUN_TASK_GET_OBS_MRMS="TRUE"``). When pulling observations directly from NOAA HPSS, the data retrieved will be placed in this directory. Please note, this path must be defind as ``//mrms/proc``. - - METplus configuration files require the use of a predetermined directory structure and file names. Therefore, if the MRMS files are user-provided, they need to follow the anticipated naming structure: ``{YYYYMMDD}/MergedReflectivityQCComposite_00.50_{YYYYMMDD}-{HH}{mm}{SS}.grib2``, where YYYYMMDD and {HH}{mm}{SS} are as described in the note :ref:`above `. +``DATE_LAST_CYCL``: (Default: "YYYYMMDDHH") + Starting date of the last forecast in the set of forecasts to run. Format is "YYYYMMDDHH". -.. note:: - METplus is configured to look for a MRMS composite reflectivity file for the valid time of the forecast being verified; since MRMS composite reflectivity files do not always exactly match the valid time, a script (within the main script that retrieves MRMS data from the NOAA HPSS) is used to identify and rename the MRMS composite reflectivity file to match the valid time of the forecast. The script to pull the MRMS data from the NOAA HPSS has an example of the expected file-naming structure: ``regional_workflow/scripts/exregional_get_mrms_files.sh``. This script calls the script used to identify the MRMS file closest to the valid time: ``regional_workflow/ush/mrms_pull_topofhour.py``. +``INCR_CYCL_FREQ``: (Default: 24) + Increment in hours for Rocoto cycle frequency. The default is 24, which means cycl_freq=24:00:00. +``FCST_LEN_HRS``: (Default: 24) + The length of each forecast, in integer hours. -``NDAS_OBS_DIR``: (Default: "") - User-specified location of top-level directory where NDAS prepbufr files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``get_obs_ndas_tn`` task (activated in the workflow by setting ``RUN_TASK_GET_OBS_NDAS="TRUE"``). 
When pulling observations directly from NOAA HPSS, the data retrieved will be placed in this directory. Please note, this path must be defined as ``//ndas/proc``. METplus is configured to verify near-surface variables hourly and upper-air variables at 00 and 12 UTC with NDAS prepbufr files. - - METplus configuration files require the use of predetermined file names. Therefore, if the NDAS files are user-provided, they need to follow the anticipated naming structure: ``prepbufr.ndas.{YYYYMMDDHH}``, where YYYYMMDD and HH are as described in the note :ref:`above `. The script to pull the NDAS data from the NOAA HPSS (``regional_workflow/scripts/exregional_get_ndas_files.sh``) has an example of how to rename the NDAS data into a more intuitive format with the valid time listed in the file name. +Pre-Existing Directory Parameter +------------------------------------ +``PREEXISTING_DIR_METHOD``: (Default: "delete") + This variable determines how to deal with pre-existing directories (resulting from previous calls to the experiment generation script using the same experiment name [``EXPT_SUBDIR``] as the current experiment). This variable must be set to one of three valid values: ``"delete"``, ``"rename"``, or ``"quit"``. The behavior for each of these values is as follows: -Initial and Lateral Boundary Condition Generation Parameters -============================================================ -``EXTRN_MDL_NAME_ICS``: (Default: "FV3GFS") - The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` + * **"delete":** The preexisting directory is deleted and a new directory (having the same name as the original preexisting directory) is created. -``EXTRN_MDL_NAME_LBCS``: (Default: "FV3GFS") - The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` + * **"rename":** The preexisting directory is renamed and a new directory (having the same name as the original pre-existing directory) is created. The new name of the preexisting directory consists of its original name and the suffix "_old###", where ``###`` is a 3-digit integer chosen to make the new name unique. -``LBC_SPEC_INTVL_HRS``: (Default: "6") - The interval (in integer hours) at which LBC files will be generated. This is also referred to as the *boundary specification interval*. Note that the model selected in ``EXTRN_MDL_NAME_LBCS`` must have data available at a frequency greater than or equal to that implied by ``LBC_SPEC_INTVL_HRS``. For example, if ``LBC_SPEC_INTVL_HRS`` is set to "6", then the model must have data available at least every 6 hours. It is up to the user to ensure that this is the case. + * **"quit":** The preexisting directory is left unchanged, but execution of the currently running script is terminated. In this case, the preexisting directory must be dealt with manually before rerunning the script. -``EXTRN_MDL_ICS_OFFSET_HRS``: (Default: "0") - Users may wish to start a forecast using forecast data from a previous cycle of an external model. This variable indicates how many hours earlier the external model started than the FV3 forecast configured here. 
For example, if the forecast should start from a 6-hour forecast of the GFS, then ``EXTRN_MDL_ICS_OFFSET_HRS="6"``. +Verbose Parameter +--------------------- +``VERBOSE``: (Default: true) + Flag that determines whether the experiment generation and workflow task scripts print out extra informational messages. Valid values: ``True`` | ``False`` -``EXTRN_MDL_LBCS_OFFSET_HRS``: (Default: "") - Users may wish to use lateral boundary conditions from a forecast that was started earlier than the start of the forecast configured here. This variable indicates how many hours earlier the external model started than the FV3 forecast configured here. For example, if the forecast should use lateral boundary conditions from the GFS started 6 hours earlier, then ``EXTRN_MDL_LBCS_OFFSET_HRS="6"``. Note: the default value is model-dependent and is set in ``set_extrn_mdl_params.sh``. +Debug Parameter +-------------------- +``DEBUG``: (Default: false) + Flag that determines whether to print out very detailed debugging messages. Note that if DEBUG is set to true, then VERBOSE will also be reset to true if it isn't already. Valid values: ``True`` | ``False`` -``FV3GFS_FILE_FMT_ICS``: (Default: "nemsio") - If using the FV3GFS model as the source of the :term:`ICs` (i.e., if ``EXTRN_MDL_NAME_ICS="FV3GFS"``), this variable specifies the format of the model files to use when generating the ICs. Valid values: ``"nemsio"`` | ``"grib2"`` | ``"netcdf"`` +Compiler +----------- -``FV3GFS_FILE_FMT_LBCS``: (Default: "nemsio") - If using the FV3GFS model as the source of the :term:`LBCs` (i.e., if ``EXTRN_MDL_NAME_ICS="FV3GFS"``), this variable specifies the format of the model files to use when generating the LBCs. Valid values: ``"nemsio"`` | ``"grib2"`` | ``"netcdf"`` +``COMPILER``: (Default: "intel") + Type of compiler invoked during the build step. Currently, this must be set manually; it is not inherited from the build system in the ``ufs-srweather-app`` directory. Valid values: ``"intel"`` | ``"gnu"`` +Verification Parameters +--------------------------- +``GET_OBS``: (Default: "get_obs") + Set the name of the Rocoto workflow task used to load proper module files for ``GET_OBS_*`` tasks. Users typically do not need to change this value. -Base Directories for External Model Files -=========================================== +``VX_TN``: (Default: "run_vx") + Set the name of the Rocoto workflow task used to load proper module files for ``VX_*`` tasks. Users typically do not need to change this value. -.. note:: - These variables must be defined as null strings in ``config_defaults.sh`` so that if they are specified by the user in the experiment configuration file (``config.sh``), they remain set to those values, and if not, they get set to machine-dependent values. +``VX_ENSGRID_TN``: (Default: "run_ensgridvx") + Set the name of the Rocoto workflow task that runs METplus grid-to-grid ensemble verification for 1-h accumulated precipitation. Users typically do not need to change this value. -``EXTRN_MDL_SYSBASEDIR_ICS``: (Default: "") - Base directory on the local machine containing external model files for generating :term:`ICs` on the native grid. The way the full path containing these files is constructed depends on the user-specified external model for ICs (defined in ``EXTRN_MDL_NAME_ICS`` above). +``VX_ENSGRID_PROB_REFC_TN``: (Default: "run_ensgridvx_prob_refc") + Set the name of the Rocoto workflow task that runs METplus grid-to-grid verification for ensemble probabilities for composite reflectivity. 
Users typically do not need to change this value. -``EXTRN_MDL_SYSBASEDIR_LBCS``: (Default: "") - Base directory on the local machine containing external model files for generating :term:`LBCs` on the native grid. The way the full path containing these files is constructed depends on the user-specified external model for LBCs (defined in ``EXTRN_MDL_NAME_LBCS`` above). +``MAXTRIES_VX_ENSGRID_PROB_REFC``: (Default: 1) + Maximum number of times to attempt ``VX_ENSGRID_PROB_REFC_TN``. -User-Staged External Model Directory and File Parameters -======================================================== -``USE_USER_STAGED_EXTRN_FILES``: (Default: "FALSE") - Flag that determines whether the workflow will look for the external model files needed for generating :term:`ICs` and :term:`LBCs` in user-specified directories (rather than fetching them from mass storage like NOAA :term:`HPSS`). +.. _NCOModeParms: -``EXTRN_MDL_SOURCE_BASEDIR_ICS``: (Default: "/base/dir/containing/user/staged/extrn/mdl/files/for/ICs") - Directory containing external model files for generating ICs. If ``USE_USER_STAGED_EXTRN_FILES`` is set to "TRUE", the workflow looks within this directory for a subdirectory named "YYYYMMDDHH", which contains the external model files specified by the array ``EXTRN_MDL_FILES_ICS``. This "YYYYMMDDHH" subdirectory corresponds to the start date and cycle hour of the forecast (see :ref:`above `). These files will be used to generate the :term:`ICs` on the native FV3-LAM grid. This variable is not used if ``USE_USER_STAGED_EXTRN_FILES`` is set to "FALSE". - -``EXTRN_MDL_FILES_ICS``: (Default: "ICS_file1" "ICS_file2" "...") - Array containing the file names to search for in the ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` directory. This variable is not used if ``USE_USER_STAGED_EXTRN_FILES`` is set to "FALSE". +NCO-Specific Variables +========================= -``EXTRN_MDL_SOURCE_BASEDIR_LBCS``: (Default: "/base/dir/containing/user/staged/extrn/mdl/files/for/ICs") - Analogous to ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` but for :term:`LBCs` instead of :term:`ICs`. - Directory containing external model files for generating LBCs. If ``USE_USER_STAGED_EXTRN_FILES`` is set to "TRUE", the workflow looks within this directory for a subdirectory named "YYYYMMDDHH", which contains the external model files specified by the array ``EXTRN_MDL_FILES_LBCS``. This "YYYYMMDDHH" subdirectory corresponds to the start date and cycle hour of the forecast (see :ref:`above `). These files will be used to generate the :term:`LBCs` on the native FV3-LAM grid. This variable is not used if ``USE_USER_STAGED_EXTRN_FILES`` is set to "FALSE". +A standard set of environment variables has been established for *nco* mode to simplify the production workflow and improve the troubleshooting process for operational and preoperational models. These variables are only used in *nco* mode (i.e., when ``RUN_ENVIR: "nco"``). When non-default parameters are selected for the variables in this section, they should be added to the ``nco:`` section of the ``config.yaml`` file. -``EXTRN_MDL_FILES_LBCS``: (Default: " "LBCS_file1" "LBCS_file2" "...") - Analogous to ``EXTRN_MDL_FILES_ICS`` but for :term:`LBCs` instead of :term:`ICs`. Array containing the file names to search for in the ``EXTRN_MDL_SOURCE_BASEDIR_LBCS`` directory. This variable is not used if ``USE_USER_STAGED_EXTRN_FILES`` is set to "FALSE". +.. note:: + Only *community* mode is fully supported for this release. 
*nco* mode is used by those at the Environmental Modeling Center (EMC) and Global Systems Laboratory (GSL) who are working on pre-implementation operational testing. Other users should run the SRW App in *community* mode. +``envir, NET, model_ver, RUN``: + Standard environment variables defined in the NCEP Central Operations WCOSS Implementation Standards document. These variables are used in forming the path to various directories containing input, output, and workflow files. The variables are defined in the `WCOSS Implementation Standards `__ document (pp. 4-5) as follows: -NOMADS Parameters -====================== + ``envir``: (Default: "para") + Set to "test" during the initial testing phase, "para" when running in parallel (on a schedule), and "prod" in production. -Set parameters associated with NOMADS online data. + ``NET``: (Default: "rrfs") + Model name (first level of ``com`` directory structure) -``NOMADS``: (Default: "FALSE") - Flag controlling whether to use NOMADS online data. + ``model_ver``: (Default: "v1.0.0") + Version number of package in three digits (second level of ``com`` directory) -``NOMADS_file_type``: (Default: "nemsio") - Flag controlling the format of the data. Valid values: ``"GRIB2"`` | ``"grib2"`` | ``"NEMSIO"`` | ``"nemsio"`` + ``RUN``: (Default: "rrfs") + Name of model run (third level of ``com`` directory structure). In general, same as ``$NET``. -.. _CCPP_Params: +``OPSROOT``: (Default: "") + The operations root directory in *nco* mode. -CCPP Parameter -=============== -``CCPP_PHYS_SUITE``: (Default: "FV3_GFS_v16") - This parameter indicates which :term:`CCPP` (Common Community Physics Package) physics suite to use for the forecast(s). The choice of physics suite determines the forecast model's namelist file, the diagnostics table file, the field table file, and the XML physics suite definition file, which are staged in the experiment directory or the :term:`cycle` directories under it. - - **Current supported settings for this parameter are:** +WORKFLOW SWITCHES Configuration Parameters +============================================= - | ``"FV3_GFS_v16"`` - | ``"FV3_RRFS_v1beta"`` - | ``"FV3_HRRR"`` - | ``"FV3_WoFS_v0"`` +These parameters set flags that determine whether various workflow tasks should be run. When non-default parameters are selected for the variables in this section, they should be added to the ``workflow_switches:`` section of the ``config.yaml`` file. Note that the ``MAKE_GRID_TN``, ``MAKE_OROG_TN``, and ``MAKE_SFC_CLIMO_TN`` are all :term:`cycle-independent` tasks, i.e., if they are run, they only run once at the beginning of the workflow before any cycles are run. - **Other valid values include:** +Baseline Workflow Tasks +-------------------------- - | ``"FV3_GFS_2017_gfdlmp"`` - | ``"FV3_GFS_2017_gfdlmp_regional"`` - | ``"FV3_GFS_v15p2"`` - | ``"FV3_GFS_v15_thompson_mynn_lam3km"`` +``RUN_TASK_MAKE_GRID``: (Default: true) + Flag that determines whether to run the grid file generation task (``MAKE_GRID_TN``). If this is set to true, the grid generation task is run and new grid files are generated. If it is set to false, then the scripts look for pre-generated grid files in the directory specified by ``GRID_DIR`` (see :numref:`Section %s ` below). Valid values: ``True`` | ``False`` -Stochastic Physics Parameters -================================ +``RUN_TASK_MAKE_OROG``: (Default: true) + Same as ``RUN_TASK_MAKE_GRID`` but for the orography generation task (``MAKE_OROG_TN``). 
Flag that determines whether to run the orography file generation task (``MAKE_OROG_TN``). If this is set to true, the orography generation task is run and new orography files are generated. If it is set to false, then the scripts look for pre-generated orography files in the directory specified by ``OROG_DIR`` (see :numref:`Section %s ` below). Valid values: ``True`` | ``False`` -For the most updated and detailed documentation of these parameters, see the `UFS Stochastic Physics Documentation `__. +``RUN_TASK_MAKE_SFC_CLIMO``: (Default: true) + Same as ``RUN_TASK_MAKE_GRID`` but for the surface climatology generation task (``MAKE_SFC_CLIMO_TN``). Flag that determines whether to run the surface climatology file generation task (``MAKE_SFC_CLIMO_TN``). If this is set to true, the surface climatology generation task is run and new surface climatology files are generated. If it is set to false, then the scripts look for pre-generated surface climatology files in the directory specified by ``SFC_CLIMO_DIR`` (see :numref:`Section %s ` below). Valid values: ``True`` | ``False`` -``NEW_LSCALE``: (Default: "TRUE") - Use correct formula for converting a spatial legnth scale into spectral space. +``RUN_TASK_GET_EXTRN_ICS``: (Default: true) + Flag that determines whether to run the ``GET_EXTRN_ICS_TN`` task. -Specific Humidity (SHUM) Perturbation Parameters ---------------------------------------------------- +``RUN_TASK_GET_EXTRN_LBCS``: (Default: true) + Flag that determines whether to run the ``GET_EXTRN_LBCS_TN`` task. -``DO_SHUM``: (Default: "FALSE") - Flag to turn Specific Humidity (SHUM) perturbations on or off. SHUM perturbations multiply the low-level specific humidity by a small random number at each time-step. The SHUM scheme attempts to address missing physics phenomena (e.g., cold pools, gust fronts) most active in convective regions. +``RUN_TASK_MAKE_ICS``: (Default: true) + Flag that determines whether to run the ``MAKE_ICS_TN`` task. -``ISEED_SHUM``: (Default: "2") - Seed for setting the SHUM random number sequence. +``RUN_TASK_MAKE_LBCS``: (Default: true) + Flag that determines whether to run the ``MAKE_LBCS_TN`` task. -``SHUM_MAG``: (Default: "0.006") - Amplitudes of random patterns. Corresponds to the variable ``shum`` in ``input.nml``. +``RUN_TASK_RUN_FCST``: (Default: true) + Flag that determines whether to run the ``RUN_FCST_TN`` task. -``SHUM_LSCALE``: (Default: "150000") - Decorrelation spatial scale in meters. +``RUN_TASK_RUN_POST``: (Default: true) + Flag that determines whether to run the ``RUN_POST_TN`` task. Valid values: ``True`` | ``False`` -``SHUM_TSCALE``: (Default: "21600") - Decorrelation timescale in seconds. Corresponds to the variable ``shum_tau`` in ``input.nml``. +.. _VXTasks: -``SHUM_INT``: (Default: "3600") - Interval in seconds to update random pattern (optional). Perturbations still get applied at every time-step. Corresponds to the variable ``shumint`` in ``input.nml``. +Verification Tasks +-------------------- -.. _SPPT: +``RUN_TASK_GET_OBS_CCPA``: (Default: false) + Flag that determines whether to run the ``GET_OBS_CCPA_TN`` task, which retrieves the :term:`CCPA` hourly precipitation files used by METplus from NOAA :term:`HPSS`. See :numref:`Section %s ` for additional parameters related to this task. 
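These switches are grouped under the ``workflow_switches:`` section of ``config.yaml``. As a minimal sketch (illustrative values only; the remaining observation and verification switches are described next), staging CCPA observations and running the grid-stat verification task might look like:

.. code-block:: console

   workflow_switches:
     RUN_TASK_GET_OBS_CCPA: true
     RUN_TASK_VX_GRIDSTAT: true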
-Stochastically Perturbed Physics Tendencies (SPPT) Parameters ------------------------------------------------------------------ +``RUN_TASK_GET_OBS_MRMS``: (Default: false) + Flag that determines whether to run the ``GET_OBS_MRMS_TN`` task, which retrieves the :term:`MRMS` composite reflectivity files used by METplus from NOAA HPSS. See :numref:`Section %s ` for additional parameters related to this task. -SPPT perturbs full physics tendencies *after* the call to the physics suite, unlike :ref:`SPP ` (below), which perturbs specific tuning parameters within a physics scheme. +``RUN_TASK_GET_OBS_NDAS``: (Default: false) + Flag that determines whether to run the ``GET_OBS_NDAS_TN`` task, which retrieves the :term:`NDAS` PrepBufr files used by METplus from NOAA HPSS. See :numref:`Section %s ` for additional parameters related to this task. -``DO_SPPT``: (Default: "FALSE") - Flag to turn Stochastically Perturbed Physics Tendencies (SPPT) on or off. SPPT multiplies the physics tendencies by a random number between 0 and 2 before updating the model state. This addresses error in the physics parameterizations (either missing physics or unresolved subgrid processes). It is most active in the boundary layer and convective regions. +``RUN_TASK_VX_GRIDSTAT``: (Default: false) + Flag that determines whether to run the grid-stat verification task. The :ref:`MET Grid-Stat tool ` provides verification statistics for a matched forecast and observation grid. See :numref:`Section %s ` for additional parameters related to this task. Valid values: ``True`` | ``False`` -``ISEED_SPPT``: (Default: "1") - Seed for setting the SPPT random number sequence. +``RUN_TASK_VX_POINTSTAT``: (Default: false) + Flag that determines whether to run the point-stat verification task. The :ref:`MET Point-Stat tool ` provides verification statistics for forecasts at observation points (as opposed to over gridded analyses). See :numref:`Section %s ` for additional parameters related to this task. Valid values: ``True`` | ``False`` -``SPPT_MAG``: (Default: "0.7") - Amplitude of random patterns. Corresponds to the variable ``sppt`` in ``input.nml``. +``RUN_TASK_VX_ENSGRID``: (Default: false) + Flag that determines whether to run the ensemble-stat verification for gridded data task. The :ref:`MET Ensemble-Stat tool ` provides verification statistics for ensemble forecasts and can be used in conjunction with the :ref:`MET Grid-Stat tool `. See :numref:`Section %s ` for additional parameters related to this task. Valid values: ``True`` | ``False`` -``SPPT_LOGIT``: (Default: "TRUE") - Limits the SPPT perturbations to between 0 and 2. Should be "TRUE"; otherwise the model will crash. +``RUN_TASK_VX_ENSPOINT``: (Default: false) + Flag that determines whether to run the ensemble point verification task. If this flag is set, both ensemble-stat point verification and point verification of ensemble-stat output is computed. The :ref:`MET Ensemble-Stat tool ` provides verification statistics for ensemble forecasts and can be used in conjunction with the :ref:`MET Point-Stat tool `. See :numref:`Section %s ` for additional parameters related to this task. Valid values: ``True`` | ``False`` -``SPPT_LSCALE``: (Default: "150000") - Decorrelation spatial scale in meters. +.. COMMENT: COMMENT: Define "ensemble-stat verification for gridded data," "ensemble point verification," "ensemble-stat point verification," and "point verification of ensemble-stat output"? -``SPPT_TSCALE``: (Default: "21600") - Decorrelation timescale in seconds. 
Corresponds to the variable ``sppt_tau`` in ``input.nml``. - -``SPPT_INT``: (Default: "3600") - Interval in seconds to update random pattern (optional parameter). Perturbations still get applied at every time-step. Corresponds to the variable ``spptint`` in ``input.nml``. +Plotting Task +---------------- -``SPPT_SFCLIMIT``: (Default: "TRUE") - When "TRUE", tapers the SPPT perturbations to zero at the model's lowest level, which reduces model crashes. +``RUN_TASK_PLOT_ALLVARS:`` (Default: false) + Flag that determines whether to run python plotting scripts. -``USE_ZMTNBLCK``: (Default: "FALSE") - When "TRUE", do not apply perturbations below the dividing streamline that is diagnosed by the gravity wave drag, mountain blocking scheme +.. _make-grid: -Stochastic Kinetic Energy Backscatter (SKEB) Parameters ----------------------------------------------------------- +MAKE_GRID Configuration Parameters +====================================== -``DO_SKEB``: (Default: "FALSE") - Flag to turn Stochastic Kinetic Energy Backscatter (SKEB) on or off. SKEB adds wind perturbations to the model state. Perturbations are random in space/time, but amplitude is determined by a smoothed dissipation estimate provided by the :term:`dynamical core`. SKEB addresses errors in the dynamics more active in the mid-latitudes. +Non-default parameters for the ``make_grid`` task are set in the ``task_make_grid:`` section of the ``config.yaml`` file. -``ISEED_SKEB``: (Default: "3") - Seed for setting the SHUM random number sequence. +Basic Task Parameters +-------------------------- -``SKEB_MAG``: (Default: "0.5") - Amplitude of random patterns. Corresponds to the variable ``skeb`` in ``input.nml``. +For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. Typically, users do not need to adjust the default values. -``SKEB_LSCALE``: (Default: "150000") - Decorrelation spatial scale in meters. + ``MAKE_GRID_TN``: (Default: "make_grid") + Set the name of this :term:`cycle-independent` Rocoto workflow task. Users typically do not need to change this value. -``SKEB_TSCALE``: (Default: "21600") - Decorrelation timescale in seconds. Corresponds to the variable ``skeb_tau`` in ``input.nml``. + ``NNODES_MAKE_GRID``: (Default: 1) + Number of nodes to use for the job. -``SKEB_INT``: (Default: "3600") - Interval in seconds to update random pattern (optional). Perturbations still get applied every time-step. Corresponds to the variable ``skebint`` in ``input.nml``. + ``PPN_MAKE_GRID``: (Default: 24) + Number of :term:`MPI` processes per node. -``SKEBNORM``: (Default: "1") - Patterns: - * 0-random pattern is stream function - * 1-pattern is K.E. norm - * 2-pattern is vorticity + ``WTIME_MAKE_GRID``: (Default: 00:20:00) + Maximum time for the task to complete. -``SKEB_VDOF``: (Default: "10") - The number of degrees of freedom in the vertical direction for the SKEB random pattern. + ``MAXTRIES_MAKE_GRID``: (Default: 2) + Maximum number of times to attempt the task. -.. _SPP: + ``GRID_DIR``: (Default: "") + The directory containing pre-generated grid files when ``RUN_TASK_MAKE_GRID`` is set to false. -Parameters for Stochastically Perturbed Parameterizations (SPP) ------------------------------------------------------------------- +.. _ESGgrid: -SPP perturbs specific tuning parameters within a physics :term:`parameterization ` (unlike :ref:`SPPT `, which multiplies overall physics tendencies by a random perturbation field *after* the call to the physics suite). 
Each SPP option is an array, applicable (in order) to the :term:`RAP`/:term:`HRRR`-based parameterization listed in ``SPP_VAR_LIST``. Enter each value of the array in ``config.sh`` as shown below without commas or single quotes (e.g., ``SPP_VAR_LIST=( "pbl" "sfc" "mp" "rad" "gwd"`` ). Both commas and single quotes will be added by Jinja when creating the namelist. +ESGgrid Settings +------------------- -.. note:: - SPP is currently only available for specific physics schemes used in the RAP/HRRR physics suite. Users need to be aware of which :term:`SDF` is chosen when turning this option on. Among the supported physics suites, the full set of parameterizations can only be used with the ``FV3_HRRR`` option for ``CCPP_PHYS_SUITE``. +The following parameters must be set if using the "ESGgrid" method to generate a regional grid (i.e., when ``GRID_GEN_METHOD: "ESGgrid"``, see :numref:`Section %s `). If a different ``GRID_GEN_METHOD`` is used, these parameters will be ignored. When using a predefined grid with ``GRID_GEN_METHOD: "ESGgrid"``, the values in this section will be set automatically to the assigned values for that grid. -``DO_SPP``: (Default: "false") - Flag to turn SPP on or off. SPP perturbs parameters or variables with unknown or uncertain magnitudes within the physics code based on ranges provided by physics experts. +``ESGgrid_LON_CTR``: (Default: "") + The longitude of the center of the grid (in degrees). -``ISEED_SPP``: (Default: ( "4" "4" "4" "4" "4" ) ) - Seed for setting the random number sequence for the perturbation pattern. +``ESGgrid_LAT_CTR``: (Default: "") + The latitude of the center of the grid (in degrees). -``SPP_MAG_LIST``: (Default: ( "0.2" "0.2" "0.75" "0.2" "0.2" ) ) - SPP perturbation magnitudes used in each parameterization. Corresponds to the variable ``spp_prt_list`` in ``input.nml`` +``ESGgrid_DELX``: (Default: "") + The cell size in the zonal direction of the regional grid (in meters). -``SPP_LSCALE``: (Default: ( "150000.0" "150000.0" "150000.0" "150000.0" "150000.0" ) ) - Decorrelation spatial scales in meters. - -``SPP_TSCALE``: (Default: ( "21600.0" "21600.0" "21600.0" "21600.0" "21600.0" ) ) - Decorrelation timescales in seconds. Corresponds to the variable ``spp_tau`` in ``input.nml``. - -``SPP_SIGTOP1``: (Default: ( "0.1" "0.1" "0.1" "0.1" "0.1") ) - Controls vertical tapering of perturbations at the tropopause and corresponds to the lower sigma level at which to taper perturbations to zero. - -``SPP_SIGTOP2``: (Default: ( "0.025" "0.025" "0.025" "0.025" "0.025" ) ) - Controls vertical tapering of perturbations at the tropopause and corresponds to the upper sigma level at which to taper perturbations to zero. - -``SPP_STDDEV_CUTOFF``: (Default: ( "1.5" "1.5" "2.5" "1.5" "1.5" ) ) - Limit for possible perturbation values in standard deviations from the mean. - -``SPP_VAR_LIST``: (Default: ( "pbl" "sfc" "mp" "rad" "gwd" ) ) - The list of parameterizations to perturb: planetary boundary layer (PBL), surface physics (SFC), microphysics (MP), radiation (RAD), gravity wave drag (GWD). Valid values: ``"pbl"`` | ``"sfc"`` | ``"rad"`` | ``"gwd"`` | ``"mp"`` - - -Land Surface Model (LSM) SPP -------------------------------- - -Land surface perturbations can be applied to land model parameters and land model prognostic variables. The LSM scheme is intended to address errors in the land model and land-atmosphere interactions. 
LSM perturbations include soil moisture content (SMC) (volume fraction), vegetation fraction (VGF), albedo (ALB), salinity (SAL), emissivity (EMI), surface roughness (ZOL) (in cm), and soil temperature (STC). Perturbations to soil moisture content (SMC) are only applied at the first time step. Only five perturbations at a time can be applied currently, but all seven are shown below. In addition, only one unique *iseed* value is allowed at the moment, and it is used for each pattern. - -The parameters below turn on SPP in Noah or RUC LSM (support for Noah MP is in progress). Please be aware of the :term:`SDF` that you choose if you wish to turn on Land Surface Model (LSM) SPP. SPP in LSM schemes is handled in the ``&nam_sfcperts`` namelist block instead of in ``&nam_sppperts``, where all other SPP is implemented. The default perturbation frequency is determined by the ``fhcyc`` namelist entry. Since that parameter is set to zero in the SRW App, use ``LSM_SPP_EACH_STEP`` to perturb every time step. - -``DO_LSM_SPP``: (Default: "false") - Turns on Land Surface Model (LSM) Stochastic Physics Parameterizations (SPP). When "TRUE", sets ``lndp_type=2``, which applies land perturbations to the selected paramaters using a newer scheme designed for data assimilation (DA) ensemble spread. LSM SPP perturbs uncertain land surface fields ("smc" "vgf" "alb" "sal" "emi" "zol" "stc") based on recommendations from physics experts. - -``LSM_SPP_TSCALE``: (Default: ( ( "21600" "21600" "21600" "21600" "21600" "21600" "21600" ) ) ) - Decorrelation timescales in seconds. - -``LSM_SPP_LSCALE``: (Default: ( ( "150000" "150000" "150000" "150000" "150000" "150000" "150000" ) ) ) - Decorrelation spatial scales in meters. - -``ISEED_LSM_SPP``: (Default: ("9") ) - Seed to initialize the random perturbation pattern. - -``LSM_SPP_VAR_LIST``: (Default: ( ( "smc" "vgf" "alb" "sal" "emi" "zol" "stc" ) ) ) - Indicates which LSM variables to perturb. - -``LSM_SPP_MAG_LIST``: (Default: ( ( "0.2" "0.001" "0.001" "0.001" "0.001" "0.001" "0.2" ) ) ) - Sets the maximum random pattern amplitude for each of the LSM perturbations. - -``LSM_SPP_EACH_STEP``: (Default: "true") - When set to "TRUE", it sets ``lndp_each_step=.true.`` and perturbs each time step. - -.. _PredefGrid: - -Predefined Grid Parameters -========================== -``PREDEF_GRID_NAME``: (Default: "") - This parameter indicates which (if any) predefined regional grid to use for the experiment. Setting ``PREDEF_GRID_NAME`` provides a convenient method of specifying a commonly used set of grid-dependent parameters. The predefined grid settings can be viewed in the script ``ush/set_predef_grid_params.sh``. - - **Currently supported options:** - - | ``"RRFS_CONUS_25km"`` - | ``"RRFS_CONUS_13km"`` - | ``"RRFS_CONUS_3km"`` - | ``"SUBCONUS_Ind_3km"`` - - **Other valid values include:** - - | ``"CONUS_25km_GFDLgrid"`` - | ``"CONUS_3km_GFDLgrid"`` - | ``"EMC_AK"`` - | ``"EMC_HI"`` - | ``"EMC_PR"`` - | ``"EMC_GU"`` - | ``"GSL_HAFSV0.A_25km"`` - | ``"GSL_HAFSV0.A_13km"`` - | ``"GSL_HAFSV0.A_3km"`` - | ``"GSD_HRRR_AK_50km"`` - | ``"RRFS_AK_13km"`` - | ``"RRFS_AK_3km"`` - | ``"RRFS_CONUScompact_25km"`` - | ``"RRFS_CONUScompact_13km"`` - | ``"RRFS_CONUScompact_3km"`` - | ``"RRFS_NA_13km"`` - | ``"RRFS_NA_3km"`` - | ``"RRFS_SUBCONUS_3km"`` - | ``"WoFS_3km"`` - -.. 
note:: - - * If ``PREDEF_GRID_NAME`` is set to a valid predefined grid name, the grid generation method, the (native) grid parameters, and the write component grid parameters are set to predefined values for the specified grid, overwriting any settings of these parameters in the user-specified experiment configuration file (``config.sh``). In addition, if the time step ``DT_ATMOS`` and the computational parameters (``LAYOUT_X``, ``LAYOUT_Y``, and ``BLOCKSIZE``) are not specified in that configuration file, they are also set to predefined values for the specified grid. - - * If ``PREDEF_GRID_NAME`` is set to an empty string, it implies that the user will provide the native grid parameters in the user-specified experiment configuration file (``config.sh``). In this case, the grid generation method, the native grid parameters, the write component grid parameters, the main time step (``DT_ATMOS``), and the computational parameters (``LAYOUT_X``, ``LAYOUT_Y``, and ``BLOCKSIZE``) must be set in the configuration file. Otherwise, the values of the parameters in the default experiment configuration file (``config_defaults.sh``) will be used. - - -.. _ConfigParameters: - -Grid Generation Parameters -========================== -``GRID_GEN_METHOD``: (Default: "") - This variable specifies which method to use to generate a regional grid in the horizontal plane. The values that it can take on are: - - * **"ESGgrid":** The "ESGgrid" method will generate a regional version of the Extended Schmidt Gnomonic (ESG) grid using the map projection developed by Jim Purser of EMC (:cite:t:`Purser_2020`). "ESGgrid" is the preferred grid option. - - * **"GFDLgrid":** The "GFDLgrid" method first generates a "parent" global cubed-sphere grid. Then a portion from tile 6 of the global grid is used as the regional grid. This regional grid is referred to in the grid generation scripts as "tile 7," even though it does not correspond to a complete tile. The forecast is run only on the regional grid (i.e., on tile 7, not on tiles 1 through 6). Note that the "GFDLgrid" method is the legacy grid generation method. It is not supported in *all* predefined domains. - -.. attention:: - - If the experiment uses a **predefined grid** (i.e., if ``PREDEF_GRID_NAME`` is set to the name of a valid predefined grid), then ``GRID_GEN_METHOD`` will be reset to the value of ``GRID_GEN_METHOD`` for that grid. This will happen regardless of whether ``GRID_GEN_METHOD`` is assigned a value in the experiment configuration file; any value assigned will be overwritten. - -.. note:: - - If the experiment uses a **user-defined grid** (i.e., if ``PREDEF_GRID_NAME`` is set to a null string), then ``GRID_GEN_METHOD`` must be set in the experiment configuration file. Otherwise, the experiment generation will fail because the generation scripts check to ensure that the grid name is set to a non-empty string before creating the experiment directory. - -.. _ESGgrid: - -ESGgrid Settings -------------------- - -The following parameters must be set if using the "ESGgrid" method to generate a regional grid (i.e., when ``GRID_GEN_METHOD="ESGgrid"``). - -``ESGgrid_LON_CTR``: (Default: "") - The longitude of the center of the grid (in degrees). - -``ESGgrid_LAT_CTR``: (Default: "") - The latitude of the center of the grid (in degrees). - -``ESGgrid_DELX``: (Default: "") - The cell size in the zonal direction of the regional grid (in meters). - -``ESGgrid_DELY``: (Default: "") - The cell size in the meridional direction of the regional grid (in meters). 
+``ESGgrid_DELY``: (Default: "") + The cell size in the meridional direction of the regional grid (in meters). ``ESGgrid_NX``: (Default: "") The number of cells in the zonal direction on the regional grid. @@ -663,7 +555,7 @@ The following parameters must be set if using the "ESGgrid" method to generate a The rotational parameter for the "ESGgrid" (in degrees). ``ESGgrid_WIDE_HALO_WIDTH``: (Default: "") - The width (in number of grid cells) of the :term:`halo` to add around the regional grid before shaving the halo down to the width(s) expected by the forecast model. + The width (in number of grid cells) of the :term:`halo` to add around the regional grid before shaving the halo down to the width(s) expected by the forecast model. The user need not specify this variable since it is set automatically in ``set_gridparams_ESGgrid.py`` .. _WideHalo: @@ -673,7 +565,9 @@ The following parameters must be set if using the "ESGgrid" method to generate a GFDLgrid Settings --------------------- -The following parameters must be set if using the "GFDLgrid" method to generate a regional grid (i.e., when ``GRID_GEN_METHOD="GFDLgrid"``). Note that the regional grid is defined with respect to a "parent" global cubed-sphere grid. Thus, all the parameters for a global cubed-sphere grid must be specified even though the model equations are integrated only on the regional grid. Tile 6 has arbitrarily been chosen as the tile to use to orient the global parent grid on the sphere (Earth). For convenience, the regional grid is denoted as "tile 7" even though it is embedded within tile 6 (i.e., it doesn't extend beyond the boundary of tile 6). Its exact location within tile 6 is determined by specifying the starting and ending i- and j-indices of the regional grid on tile 6, where ``i`` is the grid index in the x direction and ``j`` is the grid index in the y direction. All of this information is set in the variables below. +The following parameters must be set if using the "GFDLgrid" method to generate a regional grid (i.e., when ``GRID_GEN_METHOD: "GFDLgrid"``, see :numref:`Section %s `). If a different ``GRID_GEN_METHOD`` is used, these parameters will be ignored. When using a predefined grid with ``GRID_GEN_METHOD: "GFDLgrid"``, the values in this section will be set automatically to the assigned values for that grid. + +Note that the regional grid is defined with respect to a "parent" global cubed-sphere grid. Thus, all the parameters for a global cubed-sphere grid must be specified even though the model equations are integrated only on the regional grid. Tile 6 has arbitrarily been chosen as the tile to use to orient the global parent grid on the sphere (Earth). For convenience, the regional grid is denoted as "tile 7" even though it is embedded within tile 6 (i.e., it doesn't extend beyond the boundary of tile 6). Its exact location within tile 6 is determined by specifying the starting and ending i- and j-indices of the regional grid on tile 6, where ``i`` is the grid index in the x direction and ``j`` is the grid index in the y direction. All of this information is set in the variables below. ``GFDLgrid_LON_T6_CTR``: (Default: "") Longitude of the center of tile 6 (in degrees). @@ -682,30 +576,29 @@ The following parameters must be set if using the "GFDLgrid" method to generate Latitude of the center of tile 6 (in degrees). ``GFDLgrid_NUM_CELLS``: (Default: "") - Number of grid cells in either of the two horizontal directions (x and y) on each of the six tiles of the parent global cubed-sphere grid. 
Valid values: ``"48"`` | ``"96"`` | ``"192"`` | ``"384"`` | ``"768"`` | ``"1152"`` | ``"3072"`` + Number of grid cells in either of the two horizontal directions (x and y) on each of the six tiles of the parent global cubed-sphere grid. Valid values: ``48`` | ``96`` | ``192`` | ``384`` | ``768`` | ``1152`` | ``3072`` - To give an idea of what these values translate to in terms of grid cell size in kilometers, we list below the approximate grid cell size on a uniform global grid having the specified value of ``GFDLgrid_NUM_CELLS``, where by "uniform" we mean with Schmidt stretch factor ``GFDLgrid_STRETCH_FAC="1"`` (although in regional applications ``GFDLgrid_STRETCH_FAC`` will typically be set to a value greater than ``"1"`` to obtain a smaller grid size on tile 6): + To give an idea of what these values translate to in terms of grid cell size in kilometers, we list below the approximate grid cell size on a uniform global grid having the specified value of ``GFDLgrid_NUM_CELLS``, where by "uniform" we mean with Schmidt stretch factor ``GFDLgrid_STRETCH_FAC: "1"`` (although in regional applications ``GFDLgrid_STRETCH_FAC`` will typically be set to a value greater than ``"1"`` to obtain a smaller grid size on tile 6): +---------------------+--------------------+ - | GFDLgrid_NUM_CELLS | typical cell size | + | GFDLgrid_NUM_CELLS | Typical Cell Size | +=====================+====================+ - | 48 | 208 km | + | 48 | 200 km | +---------------------+--------------------+ - | 96 | 104 km | + | 96 | 100 km | +---------------------+--------------------+ - | 192 | 52 km | + | 192 | 50 km | +---------------------+--------------------+ - | 384 | 26 km | + | 384 | 25 km | +---------------------+--------------------+ | 768 | 13 km | +---------------------+--------------------+ - | 1152 | 8.7 km | + | 1152 | 8.5 km | +---------------------+--------------------+ - | 3072 | 3.3 km | + | 3072 | 3.2 km | +---------------------+--------------------+ - Note that these are only typical cell sizes. The actual cell size on the global grid tiles varies somewhat as we move across a tile (and is dependent on ``GFDLgrid_STRETCH_FAC``). - + Note that these are only typical cell sizes. The actual cell size on the global grid tiles varies somewhat as we move across a tile and is also dependent on ``GFDLgrid_STRETCH_FAC``, which modifies the shape and size of the tile. ``GFDLgrid_STRETCH_FAC``: (Default: "") Stretching factor used in the Schmidt transformation applied to the parent cubed-sphere grid. Setting the Schmidt stretching factor to a value greater than 1 shrinks tile 6, while setting it to a value less than 1 (but still greater than 0) expands it. The remaining 5 tiles change shape as necessary to maintain global coverage of the grid. @@ -726,624 +619,1358 @@ The following parameters must be set if using the "GFDLgrid" method to generate j-index on tile 6 at which the regional grid (tile 7) ends. ``GFDLgrid_USE_NUM_CELLS_IN_FILENAMES``: (Default: "") - Flag that determines the file naming convention to use for grid, orography, and surface climatology files (or, if using pregenerated files, the naming convention that was used to name these files). These files usually start with the string ``"C${RES}_"``, where ``RES`` is an integer. In the global forecast model, ``RES`` is the number of points in each of the two horizontal directions (x and y) on each tile of the global grid (defined here as ``GFDLgrid_NUM_CELLS``). 
If this flag is set to "TRUE", ``RES`` will be set to ``GFDLgrid_NUM_CELLS`` just as in the global forecast model. If it is set to "FALSE", we calculate (in the grid generation task) an "equivalent global uniform cubed-sphere resolution" -- call it ``RES_EQUIV`` -- and then set ``RES`` equal to it. ``RES_EQUIV`` is the number of grid points in each of the x and y directions on each tile that a global UNIFORM (i.e., stretch factor of 1) cubed-sphere grid would need to have in order to have the same average grid size as the regional grid. This is a more useful indicator of the grid size because it takes into account the effects of ``GFDLgrid_NUM_CELLS``, ``GFDLgrid_STRETCH_FAC``, and ``GFDLgrid_REFINE_RATIO`` in determining the regional grid's typical grid size, whereas simply setting ``RES`` to ``GFDLgrid_NUM_CELLS`` doesn't take into account the effects of ``GFDLgrid_STRETCH_FAC`` and ``GFDLgrid_REFINE_RATIO`` on the regional grid's resolution. Nevertheless, some users still prefer to use ``GFDLgrid_NUM_CELLS`` in the file names, so we allow for that here by setting this flag to "TRUE". - -Computational Forecast Parameters -================================= + Flag that determines the file naming convention to use for grid, orography, and surface climatology files (or, if using pregenerated files, the naming convention that was used to name these files). These files usually start with the string ``"C${RES}_"``, where ``RES`` is an integer. In the global forecast model, ``RES`` is the number of points in each of the two horizontal directions (x and y) on each tile of the global grid (defined here as ``GFDLgrid_NUM_CELLS``). If this flag is set to true, ``RES`` will be set to ``GFDLgrid_NUM_CELLS`` just as in the global forecast model. If it is set to false, we calculate (in the grid generation task) an "equivalent global uniform cubed-sphere resolution" --- call it ``RES_EQUIV`` --- and then set ``RES`` equal to it. ``RES_EQUIV`` is the number of grid points in each of the x and y directions on each tile that a global UNIFORM (i.e., stretch factor of 1) cubed-sphere grid would need to have in order to have the same average grid size as the regional grid. This is a more useful indicator of the grid size because it takes into account the effects of ``GFDLgrid_NUM_CELLS``, ``GFDLgrid_STRETCH_FAC``, and ``GFDLgrid_REFINE_RATIO`` in determining the regional grid's typical grid size, whereas simply setting ``RES`` to ``GFDLgrid_NUM_CELLS`` doesn't take into account the effects of ``GFDLgrid_STRETCH_FAC`` and ``GFDLgrid_REFINE_RATIO`` on the regional grid's resolution. Nevertheless, some users still prefer to use ``GFDLgrid_NUM_CELLS`` in the file names, so we allow for that here by setting this flag to true. -``LAYOUT_X, LAYOUT_Y``: (Default: "") - The number of :term:`MPI` tasks (processes) to use in the two horizontal directions (x and y) of the regional grid when running the forecast model. +.. _make-orog: + +MAKE_OROG Configuration Parameters +===================================== -``BLOCKSIZE``: (Default: "") - The amount of data that is passed into the cache at a time. +Non-default parameters for the ``make_orog`` task are set in the ``task_make_orog:`` section of the ``config.yaml`` file. -.. note:: +``MAKE_OROG_TN``: (Default: "make_orog") + Set the name of this :term:`cycle-independent` Rocoto workflow task. Users typically do not need to change this value. 
- In ``config_defaults.sh`` these parameters are set to null strings so that: +``NNODES_MAKE_OROG``: (Default: 1) + Number of nodes to use for the job. - #. If the experiment is using a predefined grid and the user sets the ``BLOCKSIZE`` parameter in the user-specified experiment configuration file (i.e., ``config.sh``), that value will be used in the forecast(s). Otherwise, the default ``BLOCKSIZE`` for that predefined grid will be used. - #. If the experiment is *not* using a predefined grid (i.e., it is using a custom grid whose parameters are specified in the experiment configuration file), then the user must specify a value for the ``BLOCKSIZE`` parameter in that configuration file. Otherwise, it will remain set to a null string, and the experiment generation will fail, because the generation scripts check to ensure that all the parameters defined in this section are set to non-empty strings before creating the experiment directory. +``PPN_MAKE_OROG``: (Default: 24) + Number of :term:`MPI` processes per node. -.. _WriteComp: +``WTIME_MAKE_OROG``: (Default: 00:20:00) + Maximum time for the task to complete. -Write-Component (Quilting) Parameters -====================================== +``MAXTRIES_MAKE_OROG``: (Default: 2) + Maximum number of times to attempt the task. -.. note:: - The :term:`UPP` (called by the ``RUN_POST_TN`` task) cannot process output on the native grid types ("GFDLgrid" and "ESGgrid"), so output fields are interpolated to a **write-component grid** before writing them to an output file. The output files written by the UFS Weather Model use an Earth System Modeling Framework (:term:`ESMF`) component, referred to as the **write component**. This model component is configured with settings in the ``model_configure`` file, as described in `Section 4.2.3 `__ of the UFS Weather Model documentation. +``KMP_AFFINITY_MAKE_OROG``: (Default: "disabled") + Intel Thread Affinity Interface for the ``make_orog`` task. See :ref:`this note ` for more information on thread affinity. Settings for the ``make_orog`` task are disabled because this task does not use parallelized code. -``QUILTING``: (Default: "TRUE") +``OMP_NUM_THREADS_MAKE_OROG``: (Default: 6) + The number of OpenMP threads to use for parallel regions. - .. attention:: - The regional grid requires the use of the write component, so users generally should not need to change the default value for ``QUILTING``. +``OMP_STACKSIZE_MAKE_OROG``: (Default: "2048m") + Controls the size of the stack for threads created by the OpenMP implementation. - Flag that determines whether to use the write component for writing forecast output files to disk. If set to "TRUE", the forecast model will output files named ``dynf$HHH.nc`` and ``phyf$HHH.nc`` (where ``HHH`` is the 3-digit forecast hour) containing dynamics and physics fields, respectively, on the write-component grid. For example, the output files for the 3rd hour of the forecast would be ``dynf$003.nc`` and ``phyf$003.nc``. (The regridding from the native FV3-LAM grid to the write-component grid is done by the forecast model.) If ``QUILTING`` is set to "FALSE", then the output file names are ``fv3_history.nc`` and ``fv3_history2d.nc``, and they contain fields on the native grid. Although the UFS Weather Model can run without quilting, the regional grid requires the use of the write component. Therefore, QUILTING should be set to "TRUE" when running the SRW App.
If ``QUILTING`` is set to "FALSE", the ``RUN_POST_TN`` (meta)task cannot run because the :term:`UPP` code that this task calls cannot process fields on the native grid. In that case, the ``RUN_POST_TN`` (meta)task will be automatically removed from the Rocoto workflow XML. The :ref:`INLINE POST ` option also requires ``QUILTING`` to be set to "TRUE" in the SRW App. +``OROG_DIR``: (Default: "") + The directory containing pre-generated orography files to use when ``RUN_TASK_MAKE_OROG`` is set to false. -``PRINT_ESMF``: (Default: "FALSE") - Flag that determines whether to output extra (debugging) information from :term:`ESMF` routines. Must be "TRUE" or "FALSE". Note that the write component uses ESMF library routines to interpolate from the native forecast model grid to the user-specified output grid (which is defined in the model configuration file ``model_configure`` in the forecast run directory). +.. _make-sfc-climo: -``WRTCMP_write_groups``: (Default: "1") - The number of write groups (i.e., groups of :term:`MPI` tasks) to use in the write component. +MAKE_SFC_CLIMO Configuration Parameters +=========================================== -``WRTCMP_write_tasks_per_group``: (Default: "20") - The number of MPI tasks to allocate for each write group. +Non-default parameters for the ``make_sfc_climo`` task are set in the ``task_make_sfc_climo:`` section of the ``config.yaml`` file. -``WRTCMP_output_grid``: (Default: "''") - Sets the type (coordinate system) of the write component grid. The default empty string forces the user to set a valid value for ``WRTCMP_output_grid`` in ``config.sh`` if specifying a *custom* grid. When creating an experiment with a user-defined grid, this parameter must be specified or the experiment will fail. Valid values: ``"lambert_conformal"`` | ``"regional_latlon"`` | ``"rotated_latlon"`` +``MAKE_SFC_CLIMO_TN``: (Default: "make_sfc_climo") + Set the name of this :term:`cycle-independent` Rocoto workflow task. Users typically do not need to change this value. -``WRTCMP_cen_lon``: (Default: "") - Longitude (in degrees) of the center of the write component grid. Can usually be set to the corresponding value from the native grid. +``NNODES_MAKE_SFC_CLIMO``: (Default: 2) + Number of nodes to use for the job. -``WRTCMP_cen_lat``: (Default: "") - Latitude (in degrees) of the center of the write component grid. Can usually be set to the corresponding value from the native grid. +``PPN_MAKE_SFC_CLIMO``: (Default: 24) + Number of :term:`MPI` processes per node. -``WRTCMP_lon_lwr_left``: (Default: "") - Longitude (in degrees) of the center of the lower-left (southwest) cell on the write component grid. If using the "rotated_latlon" coordinate system, this is expressed in terms of the rotated longitude. Must be set manually when running an experiment with a user-defined grid. +``WTIME_MAKE_SFC_CLIMO``: (Default: 00:20:00) + Maximum time for the task to complete. -``WRTCMP_lat_lwr_left``: (Default: "") - Latitude (in degrees) of the center of the lower-left (southwest) cell on the write component grid. If using the "rotated_latlon" coordinate system, this is expressed in terms of the rotated latitude. Must be set manually when running an experiment with a user-defined grid. +``MAXTRIES_MAKE_SFC_CLIMO``: (Default: 2) + Maximum number of times to attempt the task. -**The following parameters must be set when** ``WRTCMP_output_grid`` **is set to "rotated_latlon":** +``KMP_AFFINITY_MAKE_SFC_CLIMO``: (Default: "scatter") + Intel Thread Affinity Interface for the ``make_sfc_climo`` task.
See :ref:`this note ` for more information on thread affinity. -``WRTCMP_lon_upr_rght``: (Default: "") - Longitude (in degrees) of the center of the upper-right (northeast) cell on the write component grid (expressed in terms of the rotated longitude). +``OMP_NUM_THREADS_MAKE_SFC_CLIMO``: (Default: 1) + The number of OpenMP threads to use for parallel regions. -``WRTCMP_lat_upr_rght``: (Default: "") - Latitude (in degrees) of the center of the upper-right (northeast) cell on the write component grid (expressed in terms of the rotated latitude). +``OMP_STACKSIZE_MAKE_SFC_CLIMO``: (Default: "1024m") + Controls the size of the stack for threads created by the OpenMP implementation. -``WRTCMP_dlon``: (Default: "") - Size (in degrees) of a grid cell on the write component grid (expressed in terms of the rotated longitude). +``SFC_CLIMO_DIR``: (Default: "") + The directory containing pre-generated surface climatology files to use when ``RUN_TASK_MAKE_SFC_CLIMO`` is set to false. -``WRTCMP_dlat``: (Default: "") - Size (in degrees) of a grid cell on the write component grid (expressed in terms of the rotated latitude). +GET_EXTRN_ICS Configuration Parameters +========================================= -**The following parameters must be set when** ``WRTCMP_output_grid`` **is set to "lambert_conformal":** +Non-default parameters for the ``get_extrn_ics`` task are set in the ``task_get_extrn_ics:`` section of the ``config.yaml`` file. -``WRTCMP_stdlat1``: (Default: "") - First standard latitude (in degrees) in definition of Lambert conformal projection. +.. _basic-get-extrn-ics: -``WRTCMP_stdlat2``: (Default: "") - Second standard latitude (in degrees) in definition of Lambert conformal projection. +Basic Task Parameters +--------------------------------- -``WRTCMP_nx``: (Default: "") - Number of grid points in the x-coordinate of the Lambert conformal projection. +For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. -``WRTCMP_ny``: (Default: "") - Number of grid points in the y-coordinate of the Lambert conformal projection. +``GET_EXTRN_ICS_TN``: (Default: "get_extrn_ics") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. -``WRTCMP_dx``: (Default: "") - Grid cell size (in meters) along the x-axis of the Lambert conformal projection. +``NNODES_GET_EXTRN_ICS``: (Default: 1) + Number of nodes to use for the job. -``WRTCMP_dy``: (Default: "") - Grid cell size (in meters) along the y-axis of the Lambert conformal projection. +``PPN_GET_EXTRN_ICS``: (Default: 1) + Number of :term:`MPI` processes per node. -Pre-existing Directory Parameter -================================ -``PREEXISTING_DIR_METHOD``: (Default: "delete") - This variable determines how to deal with pre-existing directories (resulting from previous calls to the experiment generation script using the same experiment name [``EXPT_SUBDIR``] as the current experiment). This variable must be set to one of three valid values: ``"delete"``, ``"rename"``, or ``"quit"``. The behavior for each of these values is as follows: +``WTIME_GET_EXTRN_ICS``: (Default: 00:45:00) + Maximum time for the task to complete. - * **"delete":** The preexisting directory is deleted and a new directory (having the same name as the original preexisting directory) is created. +``MAXTRIES_GET_EXTRN_ICS``: (Default: 1) + Maximum number of times to attempt the task.
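The ``GRID_DIR``, ``OROG_DIR``, and ``SFC_CLIMO_DIR`` settings above pair with the corresponding workflow switches: when a ``RUN_TASK_MAKE_*`` flag is false, the workflow expects the matching directory to point at existing files. As an illustrative sketch only (the paths are placeholders, not defaults), reusing pre-generated fix files might look like:

.. code-block:: console

   workflow_switches:
     RUN_TASK_MAKE_GRID: false
     RUN_TASK_MAKE_OROG: false
     RUN_TASK_MAKE_SFC_CLIMO: false
   task_make_grid:
     GRID_DIR: /path/to/pregenerated/grid/files
   task_make_orog:
     OROG_DIR: /path/to/pregenerated/orog/files
   task_make_sfc_climo:
     SFC_CLIMO_DIR: /path/to/pregenerated/sfc_climo/files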
- * **"rename":** The preexisting directory is renamed and a new directory (having the same name as the original pre-existing directory) is created. The new name of the preexisting directory consists of its original name and the suffix "_old###", where ``###`` is a 3-digit integer chosen to make the new name unique. +``EXTRN_MDL_NAME_ICS``: (Default: "FV3GFS") + The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` - * **"quit":** The preexisting directory is left unchanged, but execution of the currently running script is terminated. In this case, the preexisting directory must be dealt with manually before rerunning the script. +``EXTRN_MDL_ICS_OFFSET_HRS``: (Default: 0) + Users may wish to start a forecast using forecast data from a previous cycle of an external model. This variable indicates how many hours earlier the external model started than the FV3 forecast configured here. For example, if the forecast should start from a 6-hour forecast of the GFS, then ``EXTRN_MDL_ICS_OFFSET_HRS: "6"``. +``FV3GFS_FILE_FMT_ICS``: (Default: "nemsio") + If using the FV3GFS model as the source of the :term:`ICs` (i.e., if ``EXTRN_MDL_NAME_ICS: "FV3GFS"``), this variable specifies the format of the model files to use when generating the ICs. Valid values: ``"nemsio"`` | ``"grib2"`` | ``"netcdf"`` -Verbose Parameter -================= -``VERBOSE``: (Default: "TRUE") - Flag that determines whether the experiment generation and workflow task scripts print out extra informational messages. Valid values: ``"TRUE"`` | ``"true"`` | ``"YES"`` | ``"yes"`` | ``"FALSE"`` | ``"false"`` | ``"NO"`` | ``"no"`` +File and Directory Parameters +-------------------------------- -Debug Parameter -================= -``DEBUG``: (Default: "FALSE") - Flag that determines whether to print out very detailed debugging messages. Note that if DEBUG is set to TRUE, then VERBOSE will also be reset to TRUE if it isn't already. Valid values: ``"TRUE"`` | ``"true"`` | ``"YES"`` | ``"yes"`` | ``"FALSE"`` | ``"false"`` | ``"NO"`` | ``"no"`` - -.. _WFTasks: - -Rocoto Workflow Tasks -======================== - -Set the names of the various Rocoto workflow tasks. These names usually do not need to be changed. 
- -**Baseline Tasks:** - -| ``MAKE_GRID_TN``: (Default: "make_grid") -| ``MAKE_OROG_TN``: (Default: "make_orog") -| ``MAKE_SFC_CLIMO_TN``: (Default: "make_sfc_climo") -| ``GET_EXTRN_ICS_TN``: (Default: "get_extrn_ics") -| ``GET_EXTRN_LBCS_TN``: (Default: "get_extrn_lbcs") -| ``MAKE_ICS_TN``: (Default: "make_ics") -| ``MAKE_LBCS_TN``: (Default: "make_lbcs") -| ``RUN_FCST_TN``: (Default: "run_fcst") -| ``RUN_POST_TN``: (Default: "run_post") - -**METplus Verification Tasks:** When running METplus verification tasks, the following task names are also added to the Rocoto workflow: - -| ``GET_OBS``: (Default: "get_obs") -| ``GET_OBS_CCPA_TN``: (Default: "get_obs_ccpa") -| ``GET_OBS_MRMS_TN``: (Default: "get_obs_mrms") -| ``GET_OBS_NDAS_TN``: (Default: "get_obs_ndas") -| ``VX_TN``: (Default: "run_vx") -| ``VX_GRIDSTAT_TN``: (Default: "run_gridstatvx") -| ``VX_GRIDSTAT_REFC_TN``: (Default: "run_gridstatvx_refc") -| ``VX_GRIDSTAT_RETOP_TN``: (Default: "run_gridstatvx_retop") -| ``VX_GRIDSTAT_03h_TN``: (Default: "run_gridstatvx_03h") -| ``VX_GRIDSTAT_06h_TN``: (Default: "run_gridstatvx_06h") -| ``VX_GRIDSTAT_24h_TN``: (Default: "run_gridstatvx_24h") -| ``VX_POINTSTAT_TN``: (Default: "run_pointstatvx") -| ``VX_ENSGRID_TN``: (Default: "run_ensgridvx") -| ``VX_ENSGRID_03h_TN``: (Default: "run_ensgridvx_03h") -| ``VX_ENSGRID_06h_TN``: (Default: "run_ensgridvx_06h") -| ``VX_ENSGRID_24h_TN``: (Default: "run_ensgridvx_24h") -| ``VX_ENSGRID_REFC_TN``: (Default: "run_ensgridvx_refc") -| ``VX_ENSGRID_RETOP_TN``: (Default: "run_ensgridvx_retop") -| ``VX_ENSGRID_MEAN_TN``: (Default: "run_ensgridvx_mean") -| ``VX_ENSGRID_PROB_TN``: (Default: "run_ensgridvx_prob") -| ``VX_ENSGRID_MEAN_03h_TN``: (Default: "run_ensgridvx_mean_03h") -| ``VX_ENSGRID_PROB_03h_TN``: (Default: "run_ensgridvx_prob_03h") -| ``VX_ENSGRID_MEAN_06h_TN``: (Default: "run_ensgridvx_mean_06h") -| ``VX_ENSGRID_PROB_06h_TN``: (Default: "run_ensgridvx_prob_06h") -| ``VX_ENSGRID_MEAN_24h_TN``: (Default: "run_ensgridvx_mean_24h") -| ``VX_ENSGRID_PROB_24h_TN``: (Default: "run_ensgridvx_prob_24h") -| ``VX_ENSGRID_PROB_REFC_TN``: (Default: "run_ensgridvx_prob_refc") -| ``VX_ENSGRID_PROB_RETOP_TN``: (Default: "run_ensgridvx_prob_retop") -| ``VX_ENSPOINT_TN``: (Default: "run_enspointvx") -| ``VX_ENSPOINT_MEAN_TN``: (Default: "run_enspointvx_mean") -| ``VX_ENSPOINT_PROB_TN``: (Default: "run_enspointvx_prob") - - -Workflow Task Parameters -======================== -For each workflow task, additional parameters determine the values to pass to the job scheduler (e.g., Slurm), which submits a job for each task. Parameters include the number of nodes to use for the job, the number of :term:`MPI` processes per node, the maximum walltime to allow for the job to complete, and the maximum number of times to attempt each task. - -**Number of nodes:** - -| ``NNODES_MAKE_GRID``: (Default: "1") -| ``NNODES_MAKE_OROG``: (Default: "1") -| ``NNODES_MAKE_SFC_CLIMO``: (Default: "2") -| ``NNODES_GET_EXTRN_ICS``: (Default: "1") -| ``NNODES_GET_EXTRN_LBCS``: (Default: "1") -| ``NNODES_MAKE_ICS``: (Default: "4") -| ``NNODES_MAKE_LBCS``: (Default: "4") -| ``NNODES_RUN_FCST``: (Default: "") +``USE_USER_STAGED_EXTRN_FILES``: (Default: false) + Flag that determines whether the workflow will look for the external model files needed for generating :term:`ICs` in user-specified directories (rather than fetching them from mass storage like NOAA :term:`HPSS`). Valid values: ``True`` | ``False`` -.. 
note:: - The correct value for ``NNODES_RUN_FCST`` will be calculated in the workflow generation scripts. - -| ``NNODES_RUN_POST``: (Default: "2") -| ``NNODES_GET_OBS_CCPA``: (Default: "1") -| ``NNODES_GET_OBS_MRMS``: (Default: "1") -| ``NNODES_GET_OBS_NDAS``: (Default: "1") -| ``NNODES_VX_GRIDSTAT``: (Default: "1") -| ``NNODES_VX_POINTSTAT``: (Default: "1") -| ``NNODES_VX_ENSGRID``: (Default: "1") -| ``NNODES_VX_ENSGRID_MEAN``: (Default: "1") -| ``NNODES_VX_ENSGRID_PROB``: (Default: "1") -| ``NNODES_VX_ENSPOINT``: (Default: "1") -| ``NNODES_VX_ENSPOINT_MEAN``: (Default: "1") -| ``NNODES_VX_ENSPOINT_PROB``: (Default: "1") - -**Number of MPI processes per node:** - -| ``PPN_MAKE_GRID``: (Default: "24") -| ``PPN_MAKE_OROG``: (Default: "24") -| ``PPN_MAKE_SFC_CLIMO``: (Default: "24") -| ``PPN_GET_EXTRN_ICS``: (Default: "1") -| ``PPN_GET_EXTRN_LBCS``: (Default: "1") -| ``PPN_MAKE_ICS``: (Default: "12") -| ``PPN_MAKE_LBCS``: (Default: "12") -| ``PPN_RUN_FCST``: (Default: "") +``EXTRN_MDL_SOURCE_BASEDIR_ICS``: (Default: "") + Directory containing external model files for generating ICs. If ``USE_USER_STAGED_EXTRN_FILES`` is set to true, the workflow looks within this directory for a subdirectory named "YYYYMMDDHH", which contains the external model files specified by the array ``EXTRN_MDL_FILES_ICS``. This "YYYYMMDDHH" subdirectory corresponds to the start date and cycle hour of the forecast (see :ref:`above `). These files will be used to generate the :term:`ICs` on the native FV3-LAM grid. This variable is not used if ``USE_USER_STAGED_EXTRN_FILES`` is set to false. -.. note:: - The correct value for ``PPN_RUN_FCST`` will be calculated from ``NCORES_PER_NODE`` and ``OMP_NUM_THREADS`` in ``setup.sh``. - -| ``PPN_RUN_POST``: (Default: "24") -| ``PPN_GET_OBS_CCPA``: (Default: "1") -| ``PPN_GET_OBS_MRMS``: (Default: "1") -| ``PPN_GET_OBS_NDAS``: (Default: "1") -| ``PPN_VX_GRIDSTAT``: (Default: "1") -| ``PPN_VX_POINTSTAT``: (Default: "1") -| ``PPN_VX_ENSGRID``: (Default: "1") -| ``PPN_VX_ENSGRID_MEAN``: (Default: "1") -| ``PPN_VX_ENSGRID_PROB``: (Default: "1") -| ``PPN_VX_ENSPOINT``: (Default: "1") -| ``PPN_VX_ENSPOINT_MEAN``: (Default: "1") -| ``PPN_VX_ENSPOINT_PROB``: (Default: "1") - - -**Wall Times:** Maximum amount of time for the task to run - -| ``WTIME_MAKE_GRID``: (Default: "00:20:00") -| ``WTIME_MAKE_OROG``: (Default: "01:00:00") -| ``WTIME_MAKE_SFC_CLIMO``: (Default: "00:20:00") -| ``WTIME_GET_EXTRN_ICS``: (Default: "00:45:00") -| ``WTIME_GET_EXTRN_LBCS``: (Default: "00:45:00") -| ``WTIME_MAKE_ICS``: (Default: "00:30:00") -| ``WTIME_MAKE_LBCS``: (Default: "00:30:00") -| ``WTIME_RUN_FCST``: (Default: "04:30:00") -| ``WTIME_RUN_POST``: (Default: "00:15:00") -| ``WTIME_GET_OBS_CCPA``: (Default: "00:45:00") -| ``WTIME_GET_OBS_MRMS``: (Default: "00:45:00") -| ``WTIME_GET_OBS_NDAS``: (Default: "02:00:00") -| ``WTIME_VX_GRIDSTAT``: (Default: "02:00:00") -| ``WTIME_VX_POINTSTAT``: (Default: "01:00:00") -| ``WTIME_VX_ENSGRID``: (Default: "01:00:00") -| ``WTIME_VX_ENSGRID_MEAN``: (Default: "01:00:00") -| ``WTIME_VX_ENSGRID_PROB``: (Default: "01:00:00") -| ``WTIME_VX_ENSPOINT``: (Default: "01:00:00") -| ``WTIME_VX_ENSPOINT_MEAN``: (Default: "01:00:00") -| ``WTIME_VX_ENSPOINT_PROB``: (Default: "01:00:00") - -**Maximum number of attempts to run a task:** - -| ``MAXTRIES_MAKE_GRID``: (Default: "2") -| ``MAXTRIES_MAKE_OROG``: (Default: "2") -| ``MAXTRIES_MAKE_SFC_CLIMO``: (Default: "2") -| ``MAXTRIES_GET_EXTRN_ICS``: (Default: "1") -| ``MAXTRIES_GET_EXTRN_LBCS``: (Default: "1") -| 
``MAXTRIES_MAKE_ICS``: (Default: "1") -| ``MAXTRIES_MAKE_LBCS``: (Default: "1") -| ``MAXTRIES_RUN_FCST``: (Default: "1") -| ``MAXTRIES_RUN_POST``: (Default: "2") -| ``MAXTRIES_GET_OBS_CCPA``: (Default: "1") -| ``MAXTRIES_GET_OBS_MRMS``: (Default: "1") -| ``MAXTRIES_GET_OBS_NDAS``: (Default: "1") -| ``MAXTRIES_VX_GRIDSTAT``: (Default: "1") -| ``MAXTRIES_VX_GRIDSTAT_REFC``: (Default: "1") -| ``MAXTRIES_VX_GRIDSTAT_RETOP``: (Default: "1") -| ``MAXTRIES_VX_GRIDSTAT_03h``: (Default: "1") -| ``MAXTRIES_VX_GRIDSTAT_06h``: (Default: "1") -| ``MAXTRIES_VX_GRIDSTAT_24h``: (Default: "1") -| ``MAXTRIES_VX_POINTSTAT``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_REFC``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_RETOP``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_03h``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_06h``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_24h``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_MEAN``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_PROB``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_MEAN_03h``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_PROB_03h``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_MEAN_06h``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_PROB_06h``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_MEAN_24h``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_PROB_24h``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_PROB_REFC``: (Default: "1") -| ``MAXTRIES_VX_ENSGRID_PROB_RETOP``: (Default: "1") -| ``MAXTRIES_VX_ENSPOINT``: (Default: "1") -| ``MAXTRIES_VX_ENSPOINT_MEAN``: (Default: "1") -| ``MAXTRIES_VX_ENSPOINT_PROB``: (Default: "1") - - -Pre-Processing Parameters -========================= -These parameters set flags (and related directories) that determine whether various workflow tasks should be run. Note that the ``MAKE_GRID_TN``, ``MAKE_OROG_TN``, and ``MAKE_SFC_CLIMO_TN`` are all :term:`cycle-independent` tasks, i.e., if they are to be run, they do so only once at the beginning of the workflow before any cycles are run. +``EXTRN_MDL_SYSBASEDIR_ICS``: (Default: '') + A known location of a real data stream on a given platform. This is typically a real-time data stream as on Hera, Jet, or WCOSS. External model files for generating :term:`ICs` on the native grid should be accessible via this data stream. The way the full path containing these files is constructed depends on the user-specified external model for ICs (defined above in :numref:`Section %s ` ``EXTRN_MDL_NAME_ICS``). -Baseline Workflow Tasks --------------------------- + .. note:: + This variable must be defined as a null string in ``config_defaults.yaml`` so that if it is specified by the user in the experiment configuration file (``config.yaml``), it remains set to those values, and if not, it gets set to machine-dependent values. -``RUN_TASK_MAKE_GRID``: (Default: "TRUE") - Flag that determines whether to run the grid file generation task (``MAKE_GRID_TN``). If this is set to "TRUE", the grid generation task is run and new grid files are generated. If it is set to "FALSE", then the scripts look for pre-generated grid files in the directory specified by ``GRID_DIR`` (see below). +``EXTRN_MDL_FILES_ICS``: (Default: "") + Array containing templates of the file names to search for in the ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` directory. This variable is not used if ``USE_USER_STAGED_EXTRN_FILES`` is set to false. A single template should be used for each model file type that is used. Users may use any of the Python-style templates allowed in the ``ush/retrieve_data.py`` script. 
To see the full list of supported templates, run that script with the ``-h`` option. + + For example, to set FV3GFS nemsio input files: + + .. code-block:: console -``GRID_DIR``: (Default: "/path/to/pregenerated/grid/files") - The directory containing pre-generated grid files when ``RUN_TASK_MAKE_GRID`` is set to "FALSE". + EXTRN_MDL_FILES_ICS=[ gfs.t{hh}z.atmf{fcst_hr:03d}.nemsio , + gfs.t{hh}z.sfcf{fcst_hr:03d}.nemsio ] + + To set FV3GFS grib files: -``RUN_TASK_MAKE_OROG``: (Default: "TRUE") - Same as ``RUN_TASK_MAKE_GRID`` but for the orography generation task (``MAKE_OROG_TN``). Flag that determines whether to run the orography file generation task (``MAKE_OROG_TN``). If this is set to "TRUE", the orography generation task is run and new orography files are generated. If it is set to "FALSE", then the scripts look for pre-generated orography files in the directory specified by ``OROG_DIR`` (see below). + .. code-block:: console -``OROG_DIR``: (Default: "/path/to/pregenerated/orog/files") - The directory containing pre-generated orography files to use when ``MAKE_OROG_TN`` is set to "FALSE". + EXTRN_MDL_FILES_ICS=[ gfs.t{hh}z.pgrb2.0p25.f{fcst_hr:03d} ] -``RUN_TASK_MAKE_SFC_CLIMO``: (Default: "TRUE") - Same as ``RUN_TASK_MAKE_GRID`` but for the surface climatology generation task (``MAKE_SFC_CLIMO_TN``). Flag that determines whether to run the surface climatology file generation task (``MAKE_SFC_CLIMO_TN``). If this is set to "TRUE", the surface climatology generation task is run and new surface climatology files are generated. If it is set to "FALSE", then the scripts look for pre-generated surface climatology files in the directory specified by ``SFC_CLIMO_DIR`` (see below). +``EXTRN_MDL_DATA_STORES``: (Default: "") + A list of data stores where the scripts should look to find external model data. The list is in priority order. If disk information is provided via ``USE_USER_STAGED_EXTRN_FILES`` or a known location on the platform, the disk location will receive highest priority. Valid values: ``disk`` | ``hpss`` | ``aws`` | ``nomads`` -``SFC_CLIMO_DIR``: (Default: "/path/to/pregenerated/surface/climo/files") - The directory containing pre-generated surface climatology files to use when ``MAKE_SFC_CLIMO_TN`` is set to "FALSE". +NOMADS Parameters +--------------------- -``RUN_TASK_GET_EXTRN_ICS``: (Default: "TRUE") - Flag that determines whether to run the ``GET_EXTRN_ICS_TN`` task. +Set parameters associated with NOMADS online data. -``RUN_TASK_GET_EXTRN_LBCS``: (Default: "TRUE") - Flag that determines whether to run the ``GET_EXTRN_LBCS_TN`` task. +``NOMADS``: (Default: false) + Flag controlling whether to use NOMADS online data. Valid values: ``True`` | ``False`` -``RUN_TASK_MAKE_ICS``: (Default: "TRUE") - Flag that determines whether to run the ``MAKE_ICS_TN`` task. +``NOMADS_file_type``: (Default: "nemsio") + Flag controlling the format of the data. Valid values: ``"GRIB2"`` | ``"grib2"`` | ``"NEMSIO"`` | ``"nemsio"`` -``RUN_TASK_MAKE_LBCS``: (Default: "TRUE") - Flag that determines whether to run the ``MAKE_LBCS_TN`` task. -``RUN_TASK_RUN_FCST``: (Default: "TRUE") - Flag that determines whether to run the ``RUN_FCST_TN`` task. +GET_EXTRN_LBCS Configuration Parameters +========================================== -``RUN_TASK_RUN_POST``: (Default: "TRUE") - Flag that determines whether to run the ``RUN_POST_TN`` task. +Non-default parameters for the ``get_extrn_lbcs`` task are set in the ``task_get_extrn_lbcs:`` section of the ``config.yaml`` file. -.. _VXTasks: +.. 
_basic-get-extrn-lbcs: -Verification Tasks --------------------- +Basic Task Parameters +--------------------------------- -``RUN_TASK_GET_OBS_CCPA``: (Default: "FALSE") - Flag that determines whether to run the ``GET_OBS_CCPA_TN`` task, which retrieves the :term:`CCPA` hourly precipitation files used by METplus from NOAA :term:`HPSS`. +For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. -``RUN_TASK_GET_OBS_MRMS``: (Default: "FALSE") - Flag that determines whether to run the ``GET_OBS_MRMS_TN`` task, which retrieves the :term:`MRMS` composite reflectivity files used by METplus from NOAA HPSS. +``GET_EXTRN_LBCS_TN``: (Default: "get_extrn_lbcs") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. -``RUN_TASK_GET_OBS_NDAS``: (Default: "FALSE") - Flag that determines whether to run the ``GET_OBS_NDAS_TN`` task, which retrieves the :term:`NDAS` PrepBufr files used by METplus from NOAA HPSS. +``NNODES_GET_EXTRN_LBCS``: (Default: 1) + Number of nodes to use for the job. -``RUN_TASK_VX_GRIDSTAT``: (Default: "FALSE") - Flag that determines whether to run the grid-stat verification task. +``PPN_GET_EXTRN_LBCS``: (Default: 1) + Number of :term:`MPI` processes per node. -``RUN_TASK_VX_POINTSTAT``: (Default: "FALSE") - Flag that determines whether to run the point-stat verification task. +``WTIME_GET_EXTRN_LBCS``: (Default: 00:45:00) + Maximum time for the task to complete. -``RUN_TASK_VX_ENSGRID``: (Default: "FALSE") - Flag that determines whether to run the ensemble-stat verification for gridded data task. +``MAXTRIES_GET_EXTRN_LBCS``: (Default: 1) + Maximum number of times to attempt the task. -``RUN_TASK_VX_ENSPOINT``: (Default: "FALSE") - Flag that determines whether to run the ensemble point verification task. If this flag is set, both ensemble-stat point verification and point verification of ensemble-stat output is computed. +``EXTRN_MDL_NAME_LBCS``: (Default: "FV3GFS") + The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` -.. - COMMENT: Might be worth defining "ensemble-stat verification for gridded data," "ensemble point verification," "ensemble-stat point verification," and "point verification of ensemble-stat output" +``LBC_SPEC_INTVL_HRS``: (Default: "6") + The interval (in integer hours) at which LBC files will be generated. This is also referred to as the *boundary update interval*. Note that the model selected in ``EXTRN_MDL_NAME_LBCS`` must have data available at a frequency greater than or equal to that implied by ``LBC_SPEC_INTVL_HRS``. For example, if ``LBC_SPEC_INTVL_HRS`` is set to "6", then the model must have data available at least every 6 hours. It is up to the user to ensure that this is the case. -Aerosol Climatology Parameter -================================ +``EXTRN_MDL_LBCS_OFFSET_HRS``: (Default: "") + Users may wish to use lateral boundary conditions from a forecast that was started earlier than the start of the forecast configured here. This variable indicates how many hours earlier the external model started than the FV3 forecast configured here. For example, if the forecast should use lateral boundary conditions from the GFS started 6 hours earlier, then ``EXTRN_MDL_LBCS_OFFSET_HRS: "6"``. 
Note: the default value is model-dependent and is set in ``ush/set_extrn_mdl_params.py``. -``USE_MERRA_CLIMO``: (Default: "FALSE") - Flag that determines whether :term:`MERRA2` aerosol climatology data and lookup tables for optics properties are obtained. +``FV3GFS_FILE_FMT_LBCS``: (Default: "nemsio") + If using the FV3GFS model as the source of the :term:`LBCs` (i.e., if ``EXTRN_MDL_NAME_LBCS: "FV3GFS"``), this variable specifies the format of the model files to use when generating the LBCs. Valid values: ``"nemsio"`` | ``"grib2"`` | ``"netcdf"`` -.. - COMMENT: When would it be appropriate to obtain these files? -Surface Climatology Parameter -============================= -``SFC_CLIMO_FIELDS``: (Default: "("facsf" "maximum_snow_albedo" "slope_type" "snowfree_albedo" "soil_type" "substrate_temperature" "vegetation_greenness" "vegetation_type")" ) - Array containing the names of all the fields for which ``MAKE_SFC_CLIMO_TN`` generates files on the native FV3-LAM grid. +File and Directory Parameters +-------------------------------- -Fixed File Parameters -===================== -These parameters are associated with the fixed (i.e., static) files. On `Level 1 & 2 `__ systems, fixed files are prestaged with paths defined in the ``setup.sh`` script. Because the default values are platform-dependent, they are set to a null string in ``config_defaults.sh``. Then these null values are overwritten in ``setup.sh`` with machine-specific values or with a user-specified value from ``config.sh``. +``USE_USER_STAGED_EXTRN_FILES``: (Default: false) + Analogous to ``USE_USER_STAGED_EXTRN_FILES`` in :term:`ICs` but for :term:`LBCs`. Flag that determines whether the workflow will look for the external model files needed for generating :term:`LBCs` in user-specified directories (rather than fetching them from mass storage like NOAA :term:`HPSS`). Valid values: ``True`` | ``False`` + +``EXTRN_MDL_SOURCE_BASEDIR_LBCS``: (Default: "") + Analogous to ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` but for :term:`LBCs` instead of :term:`ICs`. + Directory containing external model files for generating LBCs. If ``USE_USER_STAGED_EXTRN_FILES`` is set to true, the workflow looks within this directory for a subdirectory named "YYYYMMDDHH", which contains the external model files specified by the array ``EXTRN_MDL_FILES_LBCS``. This "YYYYMMDDHH" subdirectory corresponds to the start date and cycle hour of the forecast (see :ref:`above `). These files will be used to generate the :term:`LBCs` on the native FV3-LAM grid. This variable is not used if ``USE_USER_STAGED_EXTRN_FILES`` is set to false. -``FIXgsm``: (Default: "") - System directory in which the majority of fixed (i.e., time-independent) files that are needed to run the FV3-LAM model are located. +``EXTRN_MDL_SYSBASEDIR_LBCS``: (Default: '') + Same as ``EXTRN_MDL_SYSBASEDIR_ICS`` but for :term:`LBCs`. A known location of a real data stream on a given platform. This is typically a real-time data stream as on Hera, Jet, or WCOSS. External model files for generating :term:`LBCs` on the native grid should be accessible via this data stream. The way the full path containing these files is constructed depends on the user-specified external model for LBCs (defined above in :numref:`Section %s ` ``EXTRN_MDL_NAME_LBCS`` above). -``FIXaer``: (Default: "") - System directory where :term:`MERRA2` aerosol climatology files are located. + .. 
note:: + This variable must be defined as a null string in ``config_defaults.yaml`` so that if it is specified by the user in the experiment configuration file (``config.yaml``), it remains set to those values, and if not, it gets set to machine-dependent values. -``FIXlut``: (Default: "") - System directory where the lookup tables for optics properties are located. +``EXTRN_MDL_FILES_LBCS``: (Default: "") + Analogous to ``EXTRN_MDL_FILES_ICS`` but for :term:`LBCs` instead of :term:`ICs`. Array containing templates of the file names to search for in the ``EXTRN_MDL_SOURCE_BASEDIR_LBCS`` directory. This variable is not used if ``USE_USER_STAGED_EXTRN_FILES`` is set to false. A single template should be used for each model file type that is used. Users may use any of the Python-style templates allowed in the ``ush/retrieve_data.py`` script. To see the full list of supported templates, run that script with the ``-h`` option. For examples, see the ``EXTRN_MDL_FILES_ICS`` variable above. + +``EXTRN_MDL_DATA_STORES``: (Default: "") + Analogous to ``EXTRN_MDL_DATA_STORES`` in :term:`ICs` but for :term:`LBCs`. A list of data stores where the scripts should look to find external model data. The list is in priority order. If disk information is provided via ``USE_USER_STAGED_EXTRN_FILES`` or a known location on the platform, the disk location will receive highest priority. Valid values: ``disk`` | ``hpss`` | ``aws`` | ``nomads`` -``TOPO_DIR``: (Default: "") - The location on disk of the static input files used by the ``make_orog`` task (i.e., ``orog.x`` and ``shave.x``). Can be the same as ``FIXgsm``. +NOMADS Parameters +--------------------- -``SFC_CLIMO_INPUT_DIR``: (Default: "") - The location on disk of the static surface climatology input fields, used by ``sfc_climo_gen``. These files are only used if ``RUN_TASK_MAKE_SFC_CLIMO=TRUE``. +Set parameters associated with NOMADS online data. Analogous to the NOMADS parameters for :term:`ICs` above. -``FNGLAC, ..., FNMSKH``: (Default: see below) - .. code-block:: console +``NOMADS``: (Default: false) + Flag controlling whether to use NOMADS online data. Valid values: ``True`` | ``False`` - (FNGLAC="global_glacier.2x2.grb" - FNMXIC="global_maxice.2x2.grb" - FNTSFC="RTGSST.1982.2012.monthly.clim.grb" - FNSNOC="global_snoclim.1.875.grb" - FNZORC="igbp" - FNAISC="CFSR.SEAICE.1982.2012.monthly.clim.grb" - FNSMCC="global_soilmgldas.t126.384.190.grb" - FNMSKH="seaice_newland.grb") +``NOMADS_file_type``: (Default: "nemsio") + Flag controlling the format of the data. Valid values: ``"GRIB2"`` | ``"grib2"`` | ``"NEMSIO"`` | ``"nemsio"`` - Names and default locations of (some of the) global data files that are assumed to exist in a system directory. (This directory is machine-dependent; the experiment generation scripts will set it and store it in the variable ``FIXgsm``.) These file names also appear directly in the forecast model's input :term:`namelist` file. +MAKE_ICS Configuration Parameters +====================================== -``FIXgsm_FILES_TO_COPY_TO_FIXam``: (Default: see below) - .. code-block:: console +Non-default parameters for the ``make_ics`` task are set in the ``task_make_ics:`` section of the ``config.yaml`` file.
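+ Before moving on to the individual ``make_ics`` parameters below, the following is a minimal, illustrative sketch of how the ``task_get_extrn_lbcs:`` parameters described in the previous section might be combined in ``config.yaml`` when staging :term:`LBC` files locally. All paths, dates, and file names here are placeholders and must be adapted to the user's own data:
+
+ .. code-block:: console
+
+    task_get_extrn_lbcs:
+      EXTRN_MDL_NAME_LBCS: FV3GFS
+      FV3GFS_FILE_FMT_LBCS: grib2
+      LBC_SPEC_INTVL_HRS: "6"
+      USE_USER_STAGED_EXTRN_FILES: true
+      EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/staged/lbcs
+      EXTRN_MDL_FILES_LBCS: [ "gfs.t{hh}z.pgrb2.0p25.f{fcst_hr:03d}" ]
+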
- ("$FNGLAC" \ - "$FNMXIC" \ - "$FNTSFC" \ - "$FNSNOC" \ - "$FNAISC" \ - "$FNSMCC" \ - "$FNMSKH" \ - "global_climaeropac_global.txt" \ - "fix_co2_proj/global_co2historicaldata_2010.txt" \ - "fix_co2_proj/global_co2historicaldata_2011.txt" \ - "fix_co2_proj/global_co2historicaldata_2012.txt" \ - "fix_co2_proj/global_co2historicaldata_2013.txt" \ - "fix_co2_proj/global_co2historicaldata_2014.txt" \ - "fix_co2_proj/global_co2historicaldata_2015.txt" \ - "fix_co2_proj/global_co2historicaldata_2016.txt" \ - "fix_co2_proj/global_co2historicaldata_2017.txt" \ - "fix_co2_proj/global_co2historicaldata_2018.txt" \ - "fix_co2_proj/global_co2historicaldata_2019.txt" \ - "fix_co2_proj/global_co2historicaldata_2020.txt" \ - "fix_co2_proj/global_co2historicaldata_2021.txt" \ - "global_co2historicaldata_glob.txt" \ - "co2monthlycyc.txt" \ - "global_h2o_pltc.f77" \ - "global_hyblev.l65.txt" \ - "global_zorclim.1x1.grb" \ - "global_sfc_emissivity_idx.txt" \ - "global_tg3clim.2.6x1.5.grb" \ - "global_solarconstant_noaa_an.txt" \ - "global_albedo4.1x1.grb" \ - "geo_em.d01.lat-lon.2.5m.HGT_M.nc" \ - "HGT.Beljaars_filtered.lat-lon.30s_res.nc" \ - "replace_with_FIXgsm_ozone_prodloss_filename") - - If not running in NCO mode, this array contains the names of the files to copy from the ``FIXgsm`` system directory to the ``FIXam`` directory under the experiment directory. - - .. note:: - The last element in the list above contains a dummy value. This value will be reset by the workflow generation scripts to the name of the ozone production/loss file that needs to be copied from ``FIXgsm``. This file depends on the :term:`CCPP` physics suite specified for the experiment (and the corresponding ozone parameterization scheme used in that physics suite). +Basic Task Parameters +--------------------------------- -``FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING``: (Default: see below) - .. code-block:: console +For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. - ("FNGLAC | $FNGLAC" \ - "FNMXIC | $FNMXIC" \ - "FNTSFC | $FNTSFC" \ - "FNSNOC | $FNSNOC" \ - "FNAISC | $FNAISC" \ - "FNSMCC | $FNSMCC" \ - "FNMSKH | $FNMSKH" ) +``MAKE_ICS_TN``: (Default: "make_ics") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. - This array is used to set some of the :term:`namelist` variables in the forecast model's namelist file. It maps file symlinks to the actual fixed file locations in the ``FIXam`` directory. The symlink names appear in the first column (to the left of the "|" symbol), and the paths to these files (in the ``FIXam`` directory) are held in workflow variables, which appear to the right of the "|" symbol. It is possible to remove ``FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING`` as a workflow variable and make it only a local one since it is used in only one script. +``NNODES_MAKE_ICS``: (Default: 4) + Number of nodes to use for the job. -``FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING``: (Default: see below) - .. code-block:: console +``PPN_MAKE_ICS``: (Default: 12) + Number of :term:`MPI` processes per node. 
- ("FNALBC | snowfree_albedo" \ - "FNALBC2 | facsf" \ - "FNTG3C | substrate_temperature" \ - "FNVEGC | vegetation_greenness" \ - "FNVETC | vegetation_type" \ - "FNSOTC | soil_type" \ - "FNVMNC | vegetation_greenness" \ - "FNVMXC | vegetation_greenness" \ - "FNSLPC | slope_type" \ - "FNABSC | maximum_snow_albedo" ) - - This array is used to set some of the :term:`namelist` variables in the forecast model's namelist file. The variable names appear in the first column (to the left of the "|" symbol), and the paths to these surface climatology files on the native FV3-LAM grid (in the ``FIXLAM`` directory) are derived from the corresponding surface climatology fields (the second column of the array). - -``CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING``: (Default: see below) - .. code-block:: console +``WTIME_MAKE_ICS``: (Default: 00:30:00) + Maximum time for the task to complete. - ("aerosol.dat | global_climaeropac_global.txt" \ - "co2historicaldata_2010.txt | fix_co2_proj/global_co2historicaldata_2010.txt" \ - "co2historicaldata_2011.txt | fix_co2_proj/global_co2historicaldata_2011.txt" \ - "co2historicaldata_2012.txt | fix_co2_proj/global_co2historicaldata_2012.txt" \ - "co2historicaldata_2013.txt | fix_co2_proj/global_co2historicaldata_2013.txt" \ - "co2historicaldata_2014.txt | fix_co2_proj/global_co2historicaldata_2014.txt" \ - "co2historicaldata_2015.txt | fix_co2_proj/global_co2historicaldata_2015.txt" \ - "co2historicaldata_2016.txt | fix_co2_proj/global_co2historicaldata_2016.txt" \ - "co2historicaldata_2017.txt | fix_co2_proj/global_co2historicaldata_2017.txt" \ - "co2historicaldata_2018.txt | fix_co2_proj/global_co2historicaldata_2018.txt" \ - "co2historicaldata_2019.txt | fix_co2_proj/global_co2historicaldata_2019.txt" \ - "co2historicaldata_2020.txt | fix_co2_proj/global_co2historicaldata_2020.txt" \ - "co2historicaldata_2021.txt | fix_co2_proj/global_co2historicaldata_2021.txt" \ - "co2historicaldata_glob.txt | global_co2historicaldata_glob.txt" \ - "co2monthlycyc.txt | co2monthlycyc.txt" \ - "global_h2oprdlos.f77 | global_h2o_pltc.f77" \ - "global_albedo4.1x1.grb | global_albedo4.1x1.grb" \ - "global_zorclim.1x1.grb | global_zorclim.1x1.grb" \ - "global_tg3clim.2.6x1.5.grb | global_tg3clim.2.6x1.5.grb" \ - "sfc_emissivity_idx.txt | global_sfc_emissivity_idx.txt" \ - "solarconstant_noaa_an.txt | global_solarconstant_noaa_an.txt" \ - "global_o3prdlos.f77 | " ) - - This array specifies the mapping to use between the symlinks that need to be created in each cycle directory (these are the "files" that :term:`FV3` looks for) and their targets in the ``FIXam`` directory. The first column of the array specifies the symlink to be created, and the second column specifies its target file in ``FIXam`` (where columns are delineated by the pipe symbol "|"). - -Subhourly Forecast Parameters -================================= +``MAXTRIES_MAKE_ICS``: (Default: 1) + Maximum number of times to attempt the task. -``SUB_HOURLY_POST``: (Default: "FALSE") - Flag that indicates whether the forecast model will generate output files on a sub-hourly time interval (e.g., 10 minutes, 15 minutes). This will also cause the post-processor to process these sub-hourly files. If this variable is set to "TRUE", then ``DT_SUBHOURLY_POST_MNTS`` should be set to a valid value between "01" and "59". +``KMP_AFFINITY_MAKE_ICS``: (Default: "scatter") + Intel Thread Affinity Interface for the ``make_ics`` task. See :ref:`this note ` for more information on thread affinity. 
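+ The job-related parameters above can be overridden like any other value in this section. As a hypothetical sketch, a ``task_make_ics:`` block that requests more nodes, a longer wall time, and an extra retry might look like the following (the values shown are purely illustrative; the OpenMP settings described below can be added to the same block):
+
+ .. code-block:: console
+
+    task_make_ics:
+      NNODES_MAKE_ICS: 8
+      PPN_MAKE_ICS: 12
+      WTIME_MAKE_ICS: "00:45:00"
+      MAXTRIES_MAKE_ICS: 2
+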
-``DT_SUB_HOURLY_POST_MNTS``: (Default: "00") - Time interval in minutes between the forecast model output files. If ``SUB_HOURLY_POST`` is set to "TRUE", this needs to be set to a valid two-digit integer between "01" and "59". Note that if ``SUB_HOURLY_POST`` is set to "TRUE" but ``DT_SUB_HOURLY_POST_MNTS`` is set to "00", ``SUB_HOURLY_POST`` will get reset to "FALSE" in the experiment generation scripts (there will be an informational message in the log file to emphasize this). Valid values: ``"1"`` | ``"01"`` | ``"2"`` | ``"02"`` | ``"3"`` | ``"03"`` | ``"4"`` | ``"04"`` | ``"5"`` | ``"05"`` | ``"6"`` | ``"06"`` | ``"10"`` | ``"12"`` | ``"15"`` | ``"20"`` | ``"30"`` +``OMP_NUM_THREADS_MAKE_ICS``: (Default: 1) + The number of OpenMP threads to use for parallel regions. -Customized Post Configuration Parameters -======================================== +``OMP_STACKSIZE_MAKE_ICS``: (Default: "1024m") + Controls the size of the stack for threads created by the OpenMP implementation. -``USE_CUSTOM_POST_CONFIG_FILE``: (Default: "FALSE") - Flag that determines whether a user-provided custom configuration file should be used for post-processing the model data. If this is set to "TRUE", then the workflow will use the custom post-processing (:term:`UPP`) configuration file specified in ``CUSTOM_POST_CONFIG_FP``. Otherwise, a default configuration file provided in the UPP repository will be used. +FVCOM Parameter +------------------- +``USE_FVCOM``: (Default: false) + Flag that specifies whether to update surface conditions in FV3-:term:`LAM` with fields generated from the Finite Volume Community Ocean Model (:term:`FVCOM`). If set to true, lake/sea surface temperatures, ice surface temperatures, and ice placement will be overwritten using data provided by FVCOM. Setting ``USE_FVCOM`` to true causes the executable ``process_FVCOM.exe`` in the ``MAKE_ICS_TN`` task to run. This, in turn, modifies the file ``sfc_data.nc`` generated by ``chgres_cube`` during the ``make_ics`` task. Note that the FVCOM data must already be interpolated to the desired FV3-LAM grid. Valid values: ``True`` | ``False`` -``CUSTOM_POST_CONFIG_FP``: (Default: "") - The full path to the custom flat file, including filename, to be used for post-processing. This is only used if ``CUSTOM_POST_CONFIG_FILE`` is set to "TRUE". +``FVCOM_WCSTART``: (Default: "cold") + Define if this is a "warm" start or a "cold" start. Setting this to "warm" will read in ``sfc_data.nc`` generated in a RESTART directory. Setting this to "cold" will read in the ``sfc_data.nc`` generated from ``chgres_cube`` in the ``make_ics`` portion of the workflow. Valid values: ``"cold"`` | ``"COLD"`` | ``"warm"`` | ``"WARM"`` +``FVCOM_DIR``: (Default: "") + User-defined directory where the ``fvcom.nc`` file containing :term:`FVCOM` data already interpolated to the FV3-LAM native grid is located. The file in this directory must be named ``fvcom.nc``. -Community Radiative Transfer Model (CRTM) Parameters -======================================================= +``FVCOM_FILE``: (Default: "fvcom.nc") + Name of the file located in ``FVCOM_DIR`` that has :term:`FVCOM` data interpolated to the FV3-LAM grid. This file will be copied later to a new location, and the name will be changed to ``fvcom.nc`` if a name other than ``fvcom.nc`` is selected. -These variables set parameters associated with outputting satellite fields in the :term:`UPP` :term:`grib2` files using the Community Radiative Transfer Model (:term:`CRTM`). 
:numref:`Section %s ` includes further instructions on how to do this. -``USE_CRTM``: (Default: "FALSE") - Flag that defines whether external :term:`CRTM` coefficient files have been staged by the user in order to output synthetic satellite products available within the :term:`UPP`. If this is set to "TRUE", then the workflow will check for these files in the directory ``CRTM_DIR``. Otherwise, it is assumed that no satellite fields are being requested in the UPP configuration. +MAKE_LBCS Configuration Parameters +====================================== -``CRTM_DIR``: (Default: "") - This is the path to the top CRTM fix file directory. This is only used if ``USE_CRTM`` is set to "TRUE". +Non-default parameters for the ``make_lbcs`` task are set in the ``task_make_lbcs:`` section of the ``config.yaml`` file. -Ensemble Model Parameters -============================ +``MAKE_LBCS_TN``: (Default: "make_lbcs") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. -``DO_ENSEMBLE``: (Default: "FALSE") - Flag that determines whether to run a set of ensemble forecasts (for each set of specified cycles). If this is set to "TRUE", ``NUM_ENS_MEMBERS`` forecasts are run for each cycle, each with a different set of stochastic seed values. When "FALSE", a single forecast is run for each cycle. +``NNODES_MAKE_LBCS``: (Default: 4) + Number of nodes to use for the job. -``NUM_ENS_MEMBERS``: (Default: "1") - The number of ensemble members to run if ``DO_ENSEMBLE`` is set to "TRUE". This variable also controls the naming of the ensemble member directories. For example, if ``NUM_ENS_MEMBERS`` is set to "8", the member directories will be named *mem1, mem2, ..., mem8*. If it is set to "08" (with a leading zero), the member directories will be named *mem01, mem02, ..., mem08*. However, after reading in the number of characters in this string (in order to determine how many leading zeros, if any, should be placed in the names of the member directories), the workflow generation scripts strip away those leading zeros. Thus, in the variable definitions file (``GLOBAL_VAR_DEFNS_FN``), this variable appears with its leading zeros stripped. This variable is not used unless ``DO_ENSEMBLE`` is set to "TRUE". +``PPN_MAKE_LBCS``: (Default: 12) + Number of :term:`MPI` processes per node. -.. _HaloBlend: +``WTIME_MAKE_LBCS``: (Default: 00:30:00) + Maximum time for the task to complete. -Halo Blend Parameter -==================== -``HALO_BLEND``: (Default: "10") - Number of cells to use for "blending" the external solution (obtained from the :term:`LBCs`) with the internal solution from the FV3LAM :term:`dycore`. Specifically, it refers to the number of rows into the computational domain that should be blended with the LBCs. Cells at which blending occurs are all within the boundary of the native grid; they don't involve the 4 cells outside the boundary where the LBCs are specified (which is a different :term:`halo`). Blending is necessary to smooth out waves generated due to mismatch between the external and internal solutions. To shut :term:`halo` blending off, set this to zero. +``MAXTRIES_MAKE_LBCS``: (Default: 1) + Maximum number of times to attempt the task. +``KMP_AFFINITY_MAKE_LBCS``: (Default: "scatter") + Intel Thread Affinity Interface for the ``make_lbcs`` task. See :ref:`this note ` for more information on thread affinity. 
-FVCOM Parameter -=============== -``USE_FVCOM``: (Default: "FALSE") - Flag that specifies whether or not to update surface conditions in FV3-LAM with fields generated from the Finite Volume Community Ocean Model (:term:`FVCOM`). If set to "TRUE", lake/sea surface temperatures, ice surface temperatures, and ice placement will be overwritten using data provided by FVCOM. Setting ``USE_FVCOM`` to "TRUE" causes the executable ``process_FVCOM.exe`` in the ``MAKE_ICS_TN`` task to run. This, in turn, modifies the file ``sfc_data.nc`` generated by ``chgres_cube``. Note that the FVCOM data must already be interpolated to the desired FV3-LAM grid. +``OMP_NUM_THREADS_MAKE_LBCS``: (Default: 1) + The number of OpenMP threads to use for parallel regions. -``FVCOM_WCSTART``: (Default: "cold") - Define if this is a "warm" start or a "cold" start. Setting this to "warm" will read in ``sfc_data.nc`` generated in a RESTART directory. Setting this to "cold" will read in the ``sfc_data.nc`` generated from ``chgres_cube`` in the ``make_ics`` portion of the workflow. Valid values: ``"cold"`` | ``"warm"`` +``OMP_STACKSIZE_MAKE_LBCS``: (Default: "1024m") + Controls the size of the stack for threads created by the OpenMP implementation. -``FVCOM_DIR``: (Default: "/user/defined/dir/to/fvcom/data") - User-defined directory where the ``fvcom.nc`` file containing :term:`FVCOM` data on the FV3-LAM native grid is located. The file name in this directory must be ``fvcom.nc``. +.. _FcstConfigParams: -``FVCOM_FILE``: (Default: "fvcom.nc") - Name of file located in ``FVCOM_DIR`` that has :term:`FVCOM` data interpolated to the FV3-LAM grid. This file will be copied later to a new location and the name changed to ``fvcom.nc`` if a name other than ``fvcom.nc`` is selected. +FORECAST Configuration Parameters +===================================== -Thread Affinity Interface -=========================== +Non-default parameters for the ``run_fcst`` task are set in the ``task_run_fcst:`` section of the ``config.yaml`` file. -.. note:: - Note that settings for the ``make_grid`` and ``make_orog`` tasks are disabled or not included below because they do not use parallelized code. +Basic Task Parameters +--------------------------------- -``KMP_AFFINITY_*``: (Default: see below) +For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. - .. code-block:: console +``RUN_FCST_TN``: (Default: "run_fcst") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. - KMP_AFFINITY_MAKE_OROG="disabled" - KMP_AFFINITY_MAKE_SFC_CLIMO="scatter" - KMP_AFFINITY_MAKE_ICS="scatter" - KMP_AFFINITY_MAKE_LBCS="scatter" - KMP_AFFINITY_RUN_FCST="scatter" - KMP_AFFINITY_RUN_POST="scatter" +``NNODES_RUN_FCST``: (Default: "") + Number of nodes to use for the job. This is calculated in the workflow generation scripts, so there is no need to set it in the configuration file. - "Intel's runtime library can bind OpenMP threads to physical processing units. The interface is controlled using the KMP_AFFINITY environment variable. Thread affinity restricts execution of certain threads to a subset of the physical processing units in a multiprocessor computer. Depending on the system (machine) topology, application, and operating system, thread affinity can have a dramatic effect on the application speed and on the execution speed of a program." 
Valid values: ``"scatter"`` | ``"disabled"`` | ``"balanced"`` | ``"compact"`` | ``"explicit"`` | ``"none"`` +``PPN_RUN_FCST``: (Default: "") + Number of :term:`MPI` processes per node. It will be calculated from ``NCORES_PER_NODE`` and ``OMP_NUM_THREADS`` in ``setup.py``. - For more information, see the `Intel Development Reference Guide `__. +``WTIME_RUN_FCST``: (Default: 04:30:00) + Maximum time for the task to complete. -``OMP_NUM_THREADS_*``: (Default: see below) +``MAXTRIES_RUN_FCST``: (Default: 1) + Maximum number of times to attempt the task. - .. code-block:: console +``KMP_AFFINITY_RUN_FCST``: (Default: "scatter") + Intel Thread Affinity Interface for the ``run_fcst`` task. - OMP_NUM_THREADS_MAKE_OROG="6" - OMP_NUM_THREADS_MAKE_SFC_CLIMO="1" - OMP_NUM_THREADS_MAKE_ICS="1" - OMP_NUM_THREADS_MAKE_LBCS="1" - OMP_NUM_THREADS_RUN_FCST="2" # atmos_nthreads in model_configure - OMP_NUM_THREADS_RUN_POST="1" +.. _thread-affinity: - The number of OpenMP threads to use for parallel regions. + .. note:: -.. - COMMENT: What does the #atmos_nthreads comment mean? Can it be removed? - + **Thread Affinity Interface** -``OMP_STACKSIZE_*``: (Default: see below) + "Intel's runtime library can bind OpenMP threads to physical processing units. The interface is controlled using the ``KMP_AFFINITY`` environment variable. Thread affinity restricts execution of certain threads to a subset of the physical processing units in a multiprocessor computer. Depending on the system (machine) topology, application, and operating system, thread affinity can have a dramatic effect on the application speed and on the execution speed of a program." Valid values: ``"scatter"`` | ``"disabled"`` | ``"balanced"`` | ``"compact"`` | ``"explicit"`` | ``"none"`` - .. code-block:: console + For more information, see the `Intel Development Reference Guide `__. - OMP_STACKSIZE_MAKE_OROG="2048m" - OMP_STACKSIZE_MAKE_SFC_CLIMO="1024m" - OMP_STACKSIZE_MAKE_ICS="1024m" - OMP_STACKSIZE_MAKE_LBCS="1024m" - OMP_STACKSIZE_RUN_FCST="1024m" - OMP_STACKSIZE_RUN_POST="1024m" +``OMP_NUM_THREADS_RUN_FCST``: (Default: 2) + The number of OpenMP threads to use for parallel regions. Corresponds to the ``atmos_nthreads`` value in ``model_configure``. +``OMP_STACKSIZE_RUN_FCST``: (Default: "1024m") Controls the size of the stack for threads created by the OpenMP implementation. +.. _ModelConfigParams: + +Model Configuration Parameters +---------------------------------- + +These parameters set values in the Weather Model's ``model_configure`` file. + +``DT_ATMOS``: (Default: "") + Time step for the outermost atmospheric model loop in seconds. This corresponds to the frequency at which the physics routines and the top level dynamics routine are called. (Note that one call to the top-level dynamics routine results in multiple calls to the horizontal dynamics, :term:`tracer` transport, and vertical dynamics routines; see the `FV3 dycore scientific documentation `__ for details.) Must be set. Takes an integer value. In the SRW App, a default value for ``DT_ATMOS`` appears in the ``set_predef_grid_params.yaml`` script, but a different value can be set in ``config.yaml``. In general, the smaller the grid cell size is, the smaller this value needs to be in order to avoid numerical instabilities during the forecast. + +``RESTART_INTERVAL``: (Default: 0) + Frequency of the output restart files in hours. Using the default interval (0), restart files are produced at the end of a forecast run. 
When ``RESTART_INTERVAL: 1``, restart files are produced every hour with the prefix "YYYYMMDD.HHmmSS." in the ``RESTART`` directory. + +.. _InlinePost: + +``WRITE_DOPOST``: (Default: false) + Flag that determines whether to use the inline post option. The default ``WRITE_DOPOST: false`` does not use the inline post functionality, and the ``run_post`` tasks are called from outside of the Weather Model. If ``WRITE_DOPOST: true``, the ``WRITE_DOPOST`` flag in the ``model_configure`` file will be set to true, and the post-processing (:term:`UPP`) tasks will be called from within the Weather Model. This means that the post-processed files (in :term:`grib2` format) are output by the Weather Model at the same time that it outputs the ``dynf###.nc`` and ``phyf###.nc`` files. Setting ``WRITE_DOPOST: true`` turns off the separate ``run_post`` task (i.e., ``RUN_TASK_RUN_POST`` is set to false) in ``setup.py`` to avoid unnecessary computations. Valid values: ``True`` | ``False`` + +Computational Parameters +---------------------------- + +``LAYOUT_X, LAYOUT_Y``: (Default: "") + The number of :term:`MPI` tasks (processes) to use in the two horizontal directions (x and y) of the regional grid when running the forecast model. + +``BLOCKSIZE``: (Default: "") + The amount of data that is passed into the cache at a time. + +.. note:: + + In ``config_defaults.yaml`` the computational parameters are set to null strings so that: + + #. If the experiment is using a predefined grid and the user sets the parameter in the user-specified experiment configuration file (i.e., ``config.yaml``), that value will be used in the forecast(s). Otherwise, the default value for that predefined grid will be used. + #. If the experiment is *not* using a predefined grid (i.e., it is using a custom grid whose parameters are specified in the experiment configuration file), then the user must specify a value for the parameter in that configuration file. Otherwise, the parameter will remain set to a null string, and the experiment generation will fail because the generation scripts check to ensure that all the parameters defined in this section are set to non-empty strings before creating the experiment directory. + +.. _WriteComp: + +Write-Component (Quilting) Parameters +----------------------------------------- + +.. note:: + The :term:`UPP` (called by the ``RUN_POST_TN`` task) cannot process output on the native grid types ("GFDLgrid" and "ESGgrid"), so output fields are interpolated to a **write component grid** before writing them to an output file. The output files written by the UFS Weather Model use an Earth System Modeling Framework (:term:`ESMF`) component, referred to as the **write component**. This model component is configured with settings in the ``model_configure`` file, as described in `Section 4.2.3 `__ of the UFS Weather Model documentation. + +``QUILTING``: (Default: true) + + .. attention:: + The regional grid requires the use of the write component, so users generally should not need to change the default value for ``QUILTING``. + + Flag that determines whether to use the write component for writing forecast output files to disk. If set to true, the forecast model will output files named ``dynf$HHH.nc`` and ``phyf$HHH.nc`` (where ``HHH`` is the 3-digit forecast hour) containing dynamics and physics fields, respectively, on the write-component grid. For example, the output files for the 3rd hour of the forecast would be ``dynf$003.nc`` and ``phyf$003.nc``. 
(The regridding from the native FV3-LAM grid to the write-component grid is done by the forecast model.) If ``QUILTING`` is set to false, then the output file names are ``fv3_history.nc`` and ``fv3_history2d.nc``, and they contain fields on the native grid. Although the UFS Weather Model can run without quilting, the regional grid requires the use of the write component. Therefore, QUILTING should be set to true when running the SRW App. If ``QUILTING`` is set to false, the ``RUN_POST_TN`` (meta)task cannot run because the :term:`UPP` code called by this task cannot process fields on the native grid. In that case, the ``RUN_POST_TN`` (meta)task will be automatically removed from the Rocoto workflow XML. The :ref:`INLINE POST ` option also requires ``QUILTING`` to be set to true in the SRW App. Valid values: ``True`` | ``False`` + +``PRINT_ESMF``: (Default: false) + Flag that determines whether to output extra (debugging) information from :term:`ESMF` routines. Note that the write component uses ESMF library routines to interpolate from the native forecast model grid to the user-specified output grid (which is defined in the model configuration file ``model_configure`` in the forecast run directory). Valid values: ``True`` | ``False`` + +``WRTCMP_write_groups``: (Default: 1) + The number of write groups (i.e., groups of :term:`MPI` tasks) to use in the write component. Each write group will write to one set of output files (a ``dynf${fhr}.nc`` and a ``phyf${fhr}.nc`` file, where ``${fhr}`` is the forecast hour). Each write group contains ``WRTCMP_write_tasks_per_group`` tasks. Usually, one write group is sufficient. This may need to be increased if the forecast is proceeding so quickly that a single write group cannot complete writing to its set of files before there is a need/request to start writing the next set of files at the next output time. + +``WRTCMP_write_tasks_per_group``: (Default: 20) + The number of MPI tasks to allocate for each write group. + +``WRTCMP_output_grid``: (Default: "''") + Sets the type (coordinate system) of the write component grid. The default empty string forces the user to set a valid value for ``WRTCMP_output_grid`` in ``config.yaml`` if specifying a *custom* grid. When creating an experiment with a user-defined grid, this parameter must be specified or the experiment will fail. Valid values: ``"lambert_conformal"`` | ``"regional_latlon"`` | ``"rotated_latlon"`` + +``WRTCMP_cen_lon``: (Default: "") + Longitude (in degrees) of the center of the write component grid. Can usually be set to the corresponding value from the native grid. + +``WRTCMP_cen_lat``: (Default: "") + Latitude (in degrees) of the center of the write component grid. Can usually be set to the corresponding value from the native grid. + +``WRTCMP_lon_lwr_left``: (Default: "") + Longitude (in degrees) of the center of the lower-left (southwest) cell on the write component grid. If using the "rotated_latlon" coordinate system, this is expressed in terms of the rotated longitude. Must be set manually when running an experiment with a user-defined grid. + +``WRTCMP_lat_lwr_left``: (Default: "") + Latitude (in degrees) of the center of the lower-left (southwest) cell on the write component grid. If using the "rotated_latlon" coordinate system, this is expressed in terms of the rotated latitude. Must be set manually when running an experiment with a user-defined grid. 
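+ To illustrate how the model configuration, computational, and write-component parameters above fit together, a hypothetical ``task_run_fcst:`` block for a user-defined grid might resemble the following sketch. Every value is a placeholder chosen only for illustration, and the projection-specific ``WRTCMP_*`` parameters listed below must also be added for the selected ``WRTCMP_output_grid``:
+
+ .. code-block:: console
+
+    task_run_fcst:
+      DT_ATMOS: 40
+      LAYOUT_X: 16
+      LAYOUT_Y: 10
+      BLOCKSIZE: 32
+      QUILTING: true
+      WRTCMP_write_groups: 1
+      WRTCMP_write_tasks_per_group: 20
+      WRTCMP_output_grid: "lambert_conformal"
+      WRTCMP_cen_lon: -97.5
+      WRTCMP_cen_lat: 38.5
+      WRTCMP_lon_lwr_left: -121.0
+      WRTCMP_lat_lwr_left: 23.0
+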
+ +**The following parameters must be set when** ``WRTCMP_output_grid`` **is set to "rotated_latlon":** + +``WRTCMP_lon_upr_rght``: (Default: "") + Longitude (in degrees) of the center of the upper-right (northeast) cell on the write component grid (expressed in terms of the rotated longitude). + +``WRTCMP_lat_upr_rght``: (Default: "") + Latitude (in degrees) of the center of the upper-right (northeast) cell on the write component grid (expressed in terms of the rotated latitude). + +``WRTCMP_dlon``: (Default: "") + Size (in degrees) of a grid cell on the write component grid (expressed in terms of the rotated longitude). + +``WRTCMP_dlat``: (Default: "") + Size (in degrees) of a grid cell on the write component grid (expressed in terms of the rotated latitude). + +**The following parameters must be set when** ``WRTCMP_output_grid`` **is set to "lambert_conformal":** + +``WRTCMP_stdlat1``: (Default: "") + First standard latitude (in degrees) in definition of Lambert conformal projection. + +``WRTCMP_stdlat2``: (Default: "") + Second standard latitude (in degrees) in definition of Lambert conformal projection. + +``WRTCMP_nx``: (Default: "") + Number of grid points in the x-coordinate of the Lambert conformal projection. + +``WRTCMP_ny``: (Default: "") + Number of grid points in the y-coordinate of the Lambert conformal projection. + +``WRTCMP_dx``: (Default: "") + Grid cell size (in meters) along the x-axis of the Lambert conformal projection. + +``WRTCMP_dy``: (Default: "") + Grid cell size (in meters) along the y-axis of the Lambert conformal projection. + +.. _PredefGrid: + +Predefined Grid Parameters +------------------------------ + +``PREDEF_GRID_NAME``: (Default: "") + This parameter indicates which (if any) predefined regional grid to use for the experiment. Setting ``PREDEF_GRID_NAME`` provides a convenient method of specifying a commonly used set of grid-dependent parameters. The predefined grid settings can be viewed in the script ``ush/set_predef_grid_params.yaml``. + + **Currently supported options:** + + | ``"RRFS_CONUS_25km"`` + | ``"RRFS_CONUS_13km"`` + | ``"RRFS_CONUS_3km"`` + | ``"SUBCONUS_Ind_3km"`` + + **Other valid values include:** + + | ``"CONUS_25km_GFDLgrid"`` + | ``"CONUS_3km_GFDLgrid"`` + | ``"EMC_AK"`` + | ``"EMC_HI"`` + | ``"EMC_PR"`` + | ``"EMC_GU"`` + | ``"GSL_HAFSV0.A_25km"`` + | ``"GSL_HAFSV0.A_13km"`` + | ``"GSL_HAFSV0.A_3km"`` + | ``"GSD_HRRR_AK_50km"`` + | ``"RRFS_AK_13km"`` + | ``"RRFS_AK_3km"`` + | ``"RRFS_CONUScompact_25km"`` + | ``"RRFS_CONUScompact_13km"`` + | ``"RRFS_CONUScompact_3km"`` + | ``"RRFS_NA_13km"`` + | ``"RRFS_NA_3km"`` + | ``"RRFS_SUBCONUS_3km"`` + | ``"WoFS_3km"`` + +.. note:: + + * If ``PREDEF_GRID_NAME`` is set to a valid predefined grid name, the grid generation method, the (native) grid parameters, and the write component grid parameters are set to predefined values for the specified grid, overwriting any settings of these parameters in the user-specified experiment configuration file (``config.yaml``). In addition, if the time step ``DT_ATMOS`` and the computational parameters (``LAYOUT_X``, ``LAYOUT_Y``, and ``BLOCKSIZE``) are not specified in that configuration file, they are also set to predefined values for the specified grid. + + * If ``PREDEF_GRID_NAME`` is set to an empty string, it implies that the user will provide the native grid parameters in the user-specified experiment configuration file (``config.yaml``). 
In this case, the grid generation method, the native grid parameters, the write component grid parameters, the main time step (``DT_ATMOS``), and the computational parameters (``LAYOUT_X``, ``LAYOUT_Y``, and ``BLOCKSIZE``) must be set in the configuration file. Otherwise, the values of the parameters in the default experiment configuration file (``config_defaults.yaml``) will be used. + +Aerosol Climatology Parameter +--------------------------------- + +``USE_MERRA_CLIMO``: (Default: false) + Flag that determines whether :term:`MERRA2` aerosol climatology data and lookup tables for optics properties are obtained. Valid values: ``True`` | ``False`` + + .. COMMENT: When would it be appropriate to obtain these files? + +Fixed File Parameters +------------------------- + +These parameters are associated with the fixed (i.e., static) files. On `Level 1 & 2 `__ systems, fixed files are pre-staged with paths defined in the ``setup.py`` script. Because the default values are platform-dependent, they are set to a null string in ``config_defaults.yaml``. Then these null values are overwritten in ``setup.py`` with machine-specific values or with a user-specified value from ``config.yaml``. + +``FIXgsm``: (Default: "") + System directory in which the majority of fixed (i.e., time-independent) files that are needed to run the FV3-LAM model are located. + +``FIXaer``: (Default: "") + System directory where :term:`MERRA2` aerosol climatology files are located. + +``FIXlut``: (Default: "") + System directory where the lookup tables for optics properties are located. + +``FIXshp``: (Default: "") + System directory where the graphics shapefiles are located. On Level 1 systems, these are set within the machine files. Users on other systems will need to provide the path to the directory that contains the *Natural Earth* shapefiles. + +``TOPO_DIR``: (Default: "") + The location on disk of the static input files used by the ``make_orog`` task (i.e., ``orog.x`` and ``shave.x``). Can be the same as ``FIXgsm``. + +``SFC_CLIMO_INPUT_DIR``: (Default: "") + The location on disk of the static surface climatology input fields, used by ``sfc_climo_gen``. These files are only used if ``RUN_TASK_MAKE_SFC_CLIMO: true``. + +``SYMLINK_FIX_FILES``: (Default: true) + Flag that indicates whether to symlink or copy fix files to the experiment directory. + +RUN_POST Configuration Parameters +===================================== + +Non-default parameters for the ``run_post`` task are set in the ``task_run_post:`` section of the ``config.yaml`` file. + +Basic Task Parameters +--------------------------------- + +For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. + +``RUN_POST_TN``: (Default: "run_post") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_RUN_POST``: (Default: 2) + Number of nodes to use for the job. + +``PPN_RUN_POST``: (Default: 24) + Number of :term:`MPI` processes per node. + +``WTIME_RUN_POST``: (Default: 00:15:00) + Maximum time for the task to complete. + +``MAXTRIES_RUN_POST``: (Default: 2) + Maximum number of times to attempt the task. + +``KMP_AFFINITY_RUN_POST``: (Default: "scatter") + Intel Thread Affinity Interface for the ``run_post`` task. See :ref:`this note ` for more information on thread affinity. + +``OMP_NUM_THREADS_RUN_POST``: (Default: 1) + The number of OpenMP threads to use for parallel regions. 
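+ As with the other tasks, any of these values can be overridden in the ``task_run_post:`` section of ``config.yaml``. For example, a hypothetical override giving the post-processor more wall time and an extra retry (illustrative values only; the stack-size setting described next can be placed in the same block):
+
+ .. code-block:: console
+
+    task_run_post:
+      WTIME_RUN_POST: "00:30:00"
+      MAXTRIES_RUN_POST: 3
+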
+ +``OMP_STACKSIZE_RUN_POST``: (Default: "1024m") + Controls the size of the stack for threads created by the OpenMP implementation. + + +Subhourly Post Parameters +----------------------------- +Set parameters associated with subhourly forecast model output and post-processing. + +``SUB_HOURLY_POST``: (Default: false) + Flag that indicates whether the forecast model will generate output files on a sub-hourly time interval (e.g., 10 minutes, 15 minutes). This will also cause the post-processor to process these sub-hourly files. If this variable is set to true, then ``DT_SUB_HOURLY_POST_MNTS`` should be set to a valid value between 1 and 59. Valid values: ``True`` | ``False`` + +``DT_SUB_HOURLY_POST_MNTS``: (Default: 0) + Time interval in minutes between the forecast model output files (only used if ``SUB_HOURLY_POST`` is set to true). If ``SUB_HOURLY_POST`` is set to true, this needs to be set to a valid two-digit integer between 1 and 59. Note that if ``SUB_HOURLY_POST`` is set to true but ``DT_SUB_HOURLY_POST_MNTS`` is set to 0, ``SUB_HOURLY_POST`` will get reset to false in the experiment generation scripts (there will be an informational message in the log file to emphasize this). Valid values: ``0`` | ``1`` | ``2`` | ``3`` | ``4`` | ``5`` | ``6`` | ``10`` | ``12`` | ``15`` | ``20`` | ``30`` + +Customized Post Configuration Parameters +-------------------------------------------- + +Set parameters for customizing the :term:`UPP`. + +``USE_CUSTOM_POST_CONFIG_FILE``: (Default: false) + Flag that determines whether a user-provided custom configuration file should be used for post-processing the model data. If this is set to true, then the workflow will use the custom post-processing (:term:`UPP`) configuration file specified in ``CUSTOM_POST_CONFIG_FP``. Otherwise, a default configuration file provided in the UPP repository will be used. Valid values: ``True`` | ``False`` + +``CUSTOM_POST_CONFIG_FP``: (Default: "") + The full path to the custom post flat file, including filename, to be used for post-processing. This is only used if ``USE_CUSTOM_POST_CONFIG_FILE`` is set to true. + +``POST_OUTPUT_DOMAIN_NAME``: (Default: "") + Domain name (in lowercase) used to construct the names of the output files generated by the :term:`UPP`. If using a predefined grid, ``POST_OUTPUT_DOMAIN_NAME`` defaults to ``PREDEF_GRID_NAME``. If using a custom grid, ``POST_OUTPUT_DOMAIN_NAME`` must be specified by the user. The post output files are named as follows: + + .. code-block:: console + + $NET.tHHz.[var_name].f###.${POST_OUTPUT_DOMAIN_NAME}.grib2 + + Note that this variable is first changed to lower case before being used to construct the file names. + +.. _get-obs-ccpa: + +GET_OBS_CCPA Configuration Parameters +======================================== + +Non-default parameters for the ``get_obs_ccpa`` task are set in the ``task_get_obs_ccpa:`` section of the ``config.yaml`` file. + +``GET_OBS_CCPA_TN``: (Default: "get_obs_ccpa") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. See :numref:`Section %s ` for more information about the verification tasks. + +``NNODES_GET_OBS_CCPA``: (Default: 1) + Number of nodes to use for the job. + +``PPN_GET_OBS_CCPA``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_GET_OBS_CCPA``: (Default: 00:45:00) + Maximum time for the task to complete. + +``MAXTRIES_GET_OBS_CCPA``: (Default: 1) + Maximum number of times to attempt the task. + +..
_get-obs-mrms: + +GET_OBS_MRMS Configuration Parameters +======================================== + +Non-default parameters for the ``get_obs_mrms`` task are set in the ``task_get_obs_mrms:`` section of the ``config.yaml`` file. See :numref:`Section %s ` for more information about the verification tasks. + +``GET_OBS_MRMS_TN``: (Default: "get_obs_mrms") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_GET_OBS_MRMS``: (Default: 1) + Number of nodes to use for the job. + +``PPN_GET_OBS_MRMS``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_GET_OBS_MRMS``: (Default: 00:45:00) + Maximum time for the task to complete. + +``MAXTRIES_GET_OBS_MRMS``: (Default: 1) + Maximum number of times to attempt the task. + +.. _get-obs-ndas: + +GET_OBS_NDAS Configuration Parameters +======================================== + +Non-default parameters for the ``get_obs_ndas`` task are set in the ``task_get_obs_ndas:`` section of the ``config.yaml`` file. See :numref:`Section %s ` for more information about the verification tasks. + +``GET_OBS_NDAS_TN``: (Default: "get_obs_ndas") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_GET_OBS_NDAS``: (Default: 1) + Number of nodes to use for the job. + +``PPN_GET_OBS_NDAS``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_GET_OBS_NDAS``: (Default: 02:00:00) + Maximum time for the task to complete. + +``MAXTRIES_GET_OBS_NDAS``: (Default: 1) + Maximum number of times to attempt the task. + + +.. _VX-gridstat: + +VX_GRIDSTAT Configuration Parameters +======================================== + +Non-default parameters for the ``run_gridstatvx`` task are set in the ``task_run_vx_gridstat:`` section of the ``config.yaml`` file. + +``VX_GRIDSTAT_TN``: (Default: "run_gridstatvx") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_GRIDSTAT``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_GRIDSTAT``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_GRIDSTAT``: (Default: 02:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_GRIDSTAT``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_GRIDSTAT_REFC Configuration Parameters +============================================= + +Non-default parameters for the ``run_gridstatvx_refc`` task are set in the ``task_run_vx_gridstat_refc:`` section of the ``config.yaml`` file. + +``VX_GRIDSTAT_REFC_TN``: (Default: "run_gridstatvx_refc") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_GRIDSTAT``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_GRIDSTAT``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_GRIDSTAT``: (Default: 02:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_GRIDSTAT_REFC``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_GRIDSTAT_RETOP Configuration Parameters +============================================= + +Non-default parameters for the ``run_gridstatvx_retop`` task are set in the ``task_run_vx_gridstat_retop:`` section of the ``config.yaml`` file. + +``VX_GRIDSTAT_RETOP_TN``: (Default: "run_gridstatvx_retop") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_GRIDSTAT``: (Default: 1) + Number of nodes to use for the job. 
+ +``PPN_VX_GRIDSTAT``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_GRIDSTAT``: (Default: 02:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_GRIDSTAT_RETOP``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_GRIDSTAT_03h Configuration Parameters +============================================= + +Non-default parameters for the ``run_gridstatvx_03h`` task are set in the ``task_run_vx_gridstat_03h:`` section of the ``config.yaml`` file. + +``VX_GRIDSTAT_03h_TN``: (Default: "run_gridstatvx_03h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_GRIDSTAT``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_GRIDSTAT``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_GRIDSTAT``: (Default: 02:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_GRIDSTAT_03h``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_GRIDSTAT_06h Configuration Parameters +============================================= + +Non-default parameters for the ``run_gridstatvx_06h`` task are set in the ``task_run_vx_gridstat_06h:`` section of the ``config.yaml`` file. + +``VX_GRIDSTAT_06h_TN``: (Default: "run_gridstatvx_06h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_GRIDSTAT``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_GRIDSTAT``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_GRIDSTAT``: (Default: 02:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_GRIDSTAT_06h``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_GRIDSTAT_24h Configuration Parameters +============================================= + +Non-default parameters for the ``run_gridstatvx_24h`` task are set in the ``task_run_vx_gridstat_24h:`` section of the ``config.yaml`` file. + +``VX_GRIDSTAT_24h_TN``: (Default: "run_gridstatvx_24h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_GRIDSTAT``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_GRIDSTAT``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_GRIDSTAT``: (Default: 02:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_GRIDSTAT_24h``: (Default: 1) + Maximum number of times to attempt the task. + +.. _VX-pointstat: + +VX_POINTSTAT Configuration Parameters +============================================= + +Non-default parameters for the ``run_pointstatvx`` task are set in the ``task_run_vx_pointstat:`` section of the ``config.yaml`` file. + +``VX_POINTSTAT_TN``: (Default: "run_pointstatvx") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_POINTSTAT``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_POINTSTAT``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_POINTSTAT``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_POINTSTAT``: (Default: 1) + Maximum number of times to attempt the task. + +.. _VX-ensgrid: + +VX_ENSGRID Configuration Parameters +============================================= + +Non-default parameters for the ``run_ensgridvx_*`` tasks are set in the ``task_run_vx_ensgrid:`` section of the ``config.yaml`` file. + +``VX_ENSGRID_03h_TN``: (Default: "run_ensgridvx_03h") + Set the name of this Rocoto workflow task. 
Users typically do not need to change this value. + +``MAXTRIES_VX_ENSGRID_03h``: (Default: 1) + Maximum number of times to attempt the task. + +``VX_ENSGRID_06h_TN``: (Default: "run_ensgridvx_06h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``MAXTRIES_VX_ENSGRID_06h``: (Default: 1) + Maximum number of times to attempt the task. + +``VX_ENSGRID_24h_TN``: (Default: "run_ensgridvx_24h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``MAXTRIES_VX_ENSGRID_24h``: (Default: 1) + Maximum number of times to attempt the task. + +``VX_ENSGRID_RETOP_TN``: (Default: "run_ensgridvx_retop") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``MAXTRIES_VX_ENSGRID_RETOP``: (Default: 1) + Maximum number of times to attempt the task. + +``VX_ENSGRID_PROB_RETOP_TN``: (Default: "run_ensgridvx_prob_retop") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``MAXTRIES_VX_ENSGRID_PROB_RETOP``: (Default: 1) + Maximum number of times to attempt the task. + +``NNODES_VX_ENSGRID``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSGRID``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSGRID``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSGRID``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSGRID_REFC Configuration Parameters +============================================= + +Non-default parameters for the ``run_ensgridvx_refc`` task are set in the ``task_run_vx_ensgrid_refc:`` section of the ``config.yaml`` file. + +``VX_ENSGRID_REFC_TN``: (Default: "run_ensgridvx_refc") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSGRID``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSGRID``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSGRID``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSGRID_REFC``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSGRID_MEAN Configuration Parameters +============================================= + +Non-default parameters for the ``run_ensgridvx_mean`` task are set in the ``task_run_vx_ensgrid_mean:`` section of the ``config.yaml`` file. + +``VX_ENSGRID_MEAN_TN``: (Default: "run_ensgridvx_mean") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSGRID_MEAN``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSGRID_MEAN``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSGRID_MEAN``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSGRID_MEAN``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSGRID_MEAN_03h Configuration Parameters +=============================================== + +Non-default parameters for the ``run_ensgridvx_mean_03h`` task are set in the ``task_run_vx_ensgrid_mean_03h:`` section of the ``config.yaml`` file. + +``VX_ENSGRID_MEAN_03h_TN``: (Default: "run_ensgridvx_mean_03h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSGRID_MEAN``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSGRID_MEAN``: (Default: 1) + Number of :term:`MPI` processes per node. 
+ +``WTIME_VX_ENSGRID_MEAN``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSGRID_MEAN_03h``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSGRID_MEAN_06h Configuration Parameters +=============================================== + +Non-default parameters for the ``run_ensgridvx_mean_06h`` task are set in the ``task_run_vx_ensgrid_mean_06h:`` section of the ``config.yaml`` file. + +``VX_ENSGRID_MEAN_06h_TN``: (Default: "run_ensgridvx_mean_06h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSGRID_MEAN``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSGRID_MEAN``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSGRID_MEAN``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSGRID_MEAN_06h``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSGRID_MEAN_24h Configuration Parameters +=============================================== + +Non-default parameters for the ``run_ensgridvx_mean_24h`` task are set in the ``task_run_vx_ensgrid_mean_24h:`` section of the ``config.yaml`` file. + +``VX_ENSGRID_MEAN_24h_TN``: (Default: "run_ensgridvx_mean_24h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSGRID_MEAN``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSGRID_MEAN``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSGRID_MEAN``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSGRID_MEAN_24h``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSGRID_PROB Configuration Parameters +============================================ + +Non-default parameters for the ``run_ensgridvx_prob`` task are set in the ``task_run_vx_ensgrid_prob:`` section of the ``config.yaml`` file. + +``VX_ENSGRID_PROB_TN``: (Default: "run_ensgridvx_prob") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSGRID_PROB``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSGRID_PROB``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSGRID_PROB``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSGRID_PROB``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSGRID_PROB_03h Configuration Parameters +================================================ + +Non-default parameters for the ``run_ensgridvx_prob_03h`` task are set in the ``task_run_vx_ensgrid_prob_03h:`` section of the ``config.yaml`` file. + +``VX_ENSGRID_PROB_03h_TN``: (Default: "run_ensgridvx_prob_03h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSGRID_PROB``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSGRID_PROB``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSGRID_PROB``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSGRID_PROB_03h``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSGRID_PROB_06h Configuration Parameters +================================================ + +Non-default parameters for the ``run_ensgridvx_prob_06h`` task are set in the ``task_run_vx_ensgrid_prob_06h:`` section of the ``config.yaml`` file. 
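As with the other verification task sections in this chapter, these values can be overridden by adding the corresponding block to ``config.yaml``. The following is only an illustrative sketch (the section name comes from this paragraph, the parameter names are documented below, and the values are invented for the example):

.. code-block:: yaml

   task_run_vx_ensgrid_prob_06h:
     MAXTRIES_VX_ENSGRID_PROB_06h: 2   # retry the task once on failure (illustrative value)
     WTIME_VX_ENSGRID_PROB: 02:00:00   # allow more wall-clock time than the 01:00:00 default

Analogous overrides apply to the other ``task_run_vx_*`` sections described in this chapter.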
+ +``VX_ENSGRID_PROB_06h_TN``: (Default: "run_ensgridvx_prob_06h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSGRID_PROB``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSGRID_PROB``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSGRID_PROB``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSGRID_PROB_06h``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSGRID_PROB_24h Configuration Parameters +================================================ + +Non-default parameters for the ``run_ensgridvx_prob_24h`` task are set in the ``task_run_vx_ensgrid_prob_24h:`` section of the ``config.yaml`` file. + +``VX_ENSGRID_PROB_24h_TN``: (Default: "run_ensgridvx_prob_24h") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSGRID_PROB``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSGRID_PROB``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSGRID_PROB``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSGRID_PROB_24h``: (Default: 1) + Maximum number of times to attempt the task. + +.. _VX-enspoint: + +VX_ENSPOINT Configuration Parameters +======================================== + +Non-default parameters for the ``run_enspointvx`` task are set in the ``task_run_vx_enspoint:`` section of the ``config.yaml`` file. + +``VX_ENSPOINT_TN``: (Default: "run_enspointvx") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSPOINT``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSPOINT``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSPOINT``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSPOINT``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSPOINT_MEAN Configuration Parameters +============================================== + +Non-default parameters for the ``run_enspointvx_mean`` task are set in the ``task_run_vx_enspoint_mean:`` section of the ``config.yaml`` file. + +``VX_ENSPOINT_MEAN_TN``: (Default: "run_enspointvx_mean") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSPOINT_MEAN``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSPOINT_MEAN``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSPOINT_MEAN``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSPOINT_MEAN``: (Default: 1) + Maximum number of times to attempt the task. + + +VX_ENSPOINT_PROB Configuration Parameters +============================================== + +Non-default parameters for the ``run_enspointvx_prob`` task are set in the ``task_run_vx_enspoint_prob:`` section of the ``config.yaml`` file. + +``VX_ENSPOINT_PROB_TN``: (Default: "run_enspointvx_prob") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_VX_ENSPOINT_PROB``: (Default: 1) + Number of nodes to use for the job. + +``PPN_VX_ENSPOINT_PROB``: (Default: 1) + Number of :term:`MPI` processes per node. + +``WTIME_VX_ENSPOINT_PROB``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_VX_ENSPOINT_PROB``: (Default: 1) + Maximum number of times to attempt the task. + +.. 
_PlotVars: + +PLOT_ALLVARS Configuration Parameters +======================================== + +Non-default parameters for the ``plot_allvars`` task are set in the ``task_plot_allvars:`` section of the ``config.yaml`` file. + +Basic Task Parameters +-------------------------- + +For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. Typically, users do not need to adjust the default values. + +``PLOT_ALLVARS_TN``: (Default: "plot_allvars") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_PLOT_ALLVARS``: (Default: 1) + Number of nodes to use for the job. + +``PPN_PLOT_ALLVARS``: (Default: 24) + Number of :term:`MPI` processes per node. + +``WTIME_PLOT_ALLVARS``: (Default: 01:00:00) + Maximum time for the task to complete. + +``MAXTRIES_PLOT_ALLVARS``: (Default: 1) + Maximum number of times to attempt the task. + +Additional Parameters +------------------------ + +Typically, the following parameters must be set explicitly by the user in the configuration file (``config.yaml``) when executing the plotting tasks. + +``COMOUT_REF``: (Default: "") + The directory where the GRIB2 files from post-processing are located. In *community* mode (i.e., when ``RUN_ENVIR: "community"``), this directory will correspond to the location in the experiment directory where the post-processed output can be found (e.g., ``$EXPTDIR/$DATE_FIRST_CYCL/postprd``). In *nco* mode, this directory should be set to the location of the COMOUT directory and end with ``$PDY/$cyc``. + +``PLOT_FCST_START``: (Default: 0) + The starting forecast hour for the plotting task. For example, if a forecast starts at 18h/18z, this is considered the 0th forecast hour, so "starting forecast hour" should be 0, not 18. If a forecast starts at 18h/18z, but the user only wants plots from the 6th forecast hour on, "starting forecast hour" should be 6. + +``PLOT_FCST_INC``: (Default: 3) + Forecast hour increment for the plotting task. This may be the same as ``INCR_CYCL_FREQ``, or it may be a multiple of ``INCR_CYCL_FREQ``. For example, if ``INCR_CYCL_FREQ`` is set to 3, there will be forecast output every three hours for the duration of the forecast. If the user wants plots of all of this output, they should set ``PLOT_FCST_INC: 3``. If the user only wants plots for some of the output (e.g., every 6 hours), they should set ``PLOT_FCST_INC: 6``. However, there must be forecast output available at the designated increments to produce the plots. In this example, setting ``PLOT_FCST_INC: 7`` would produce an error because forecast output is only available for hours 3, 6, 9, ..., etc. + +``PLOT_FCST_END``: (Default: "") + The last forecast hour for the plotting task. For example, if a forecast runs for 24 hours and the user wants plots for each available hour of forecast output, they should set ``PLOT_FCST_END: 24``. If the user only wants plots from the first 12 hours of the forecast, the "last forecast hour" should be 12.
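For illustration, a hedged sketch of how these plotting parameters might be set in ``config.yaml`` to produce plots every 6 hours through forecast hour 12 (the values are examples only, and ``COMOUT_REF`` must point to the post-processed GRIB2 output described above):

.. code-block:: yaml

   task_plot_allvars:
     COMOUT_REF: ""         # must be set explicitly; e.g., the experiment's postprd directory in community mode
     PLOT_FCST_START: 0     # begin with the first available forecast hour
     PLOT_FCST_INC: 6       # plot every 6 hours; output must exist at this increment
     PLOT_FCST_END: 12      # stop after forecast hour 12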
Global Configuration Parameters +=================================== + +Non-default parameters for the miscellaneous tasks are set in the ``global:`` section of the ``config.yaml`` file. + +Community Radiative Transfer Model (CRTM) Parameters +-------------------------------------------------------- + +These variables set parameters associated with outputting satellite fields in the :term:`UPP` :term:`grib2` files using the Community Radiative Transfer Model (:term:`CRTM`). :numref:`Section %s ` includes further instructions on how to do this. + +``USE_CRTM``: (Default: false) + Flag that defines whether external :term:`CRTM` coefficient files have been staged by the user in order to output synthetic satellite products available within the :term:`UPP`. If this is set to true, then the workflow will check for these files in the directory ``CRTM_DIR``. Otherwise, it is assumed that no satellite fields are being requested in the UPP configuration. Valid values: ``True`` | ``False`` + +``CRTM_DIR``: (Default: "") + This is the path to the top CRTM fix file directory. This is only used if ``USE_CRTM`` is set to true. + + +Ensemble Model Parameters +----------------------------- + +Set parameters associated with running ensembles. + +``DO_ENSEMBLE``: (Default: false) + Flag that determines whether to run a set of ensemble forecasts (for each set of specified cycles). If this is set to true, ``NUM_ENS_MEMBERS`` forecasts are run for each cycle, each with a different set of stochastic seed values. When false, a single forecast is run for each cycle. Valid values: ``True`` | ``False`` + +``NUM_ENS_MEMBERS``: (Default: 1) + The number of ensemble members to run if ``DO_ENSEMBLE`` is set to true. This variable also controls the naming of the ensemble member directories. For example, if ``NUM_ENS_MEMBERS`` is set to 8, the member directories will be named *mem1, mem2, ..., mem8*. This variable is not used unless ``DO_ENSEMBLE`` is set to true. + +.. _stochastic-physics: + +Stochastic Physics Parameters +---------------------------------- + +Set default ad-hoc stochastic physics options. For the most updated and detailed documentation of these parameters, see the `UFS Stochastic Physics Documentation `__. + +``NEW_LSCALE``: (Default: true) + Use the correct formula for converting a spatial length scale into spectral space. + +Specific Humidity (SHUM) Perturbation Parameters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +``DO_SHUM``: (Default: false) + Flag to turn Specific Humidity (SHUM) perturbations on or off. SHUM perturbations multiply the low-level specific humidity by a small random number at each time-step. The SHUM scheme attempts to address missing physics phenomena (e.g., cold pools, gust fronts) most active in convective regions. Valid values: ``True`` | ``False`` + +``ISEED_SHUM``: (Default: 2) + Seed for setting the SHUM random number sequence. + +``SHUM_MAG``: (Default: 0.006) + Amplitude of random patterns. Corresponds to the variable ``shum`` in ``input.nml``. + +``SHUM_LSCALE``: (Default: 150000) + Decorrelation spatial scale in meters. + +``SHUM_TSCALE``: (Default: 21600) + Decorrelation timescale in seconds. Corresponds to the variable ``shum_tau`` in ``input.nml``. + +``SHUM_INT``: (Default: 3600) + Interval in seconds to update the random pattern (optional). Perturbations still get applied at every time-step. Corresponds to the variable ``shumint`` in ``input.nml``.
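As a hedged illustration, the ensemble and SHUM options above might be combined in the ``global:`` section of ``config.yaml`` as follows (parameter names are those documented in this section; the non-default values are invented for the example):

.. code-block:: yaml

   global:
     DO_ENSEMBLE: true
     NUM_ENS_MEMBERS: 8      # member directories mem1 ... mem8
     DO_SHUM: true
     SHUM_MAG: 0.006         # documented default amplitude
     ISEED_SHUM: 2           # documented default seed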
.. _SPPT: + +Stochastically Perturbed Physics Tendencies (SPPT) Parameters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +SPPT perturbs full physics tendencies *after* the call to the physics suite, unlike :ref:`SPP ` (below), which perturbs specific tuning parameters within a physics scheme. + +``DO_SPPT``: (Default: false) + Flag to turn Stochastically Perturbed Physics Tendencies (SPPT) on or off. SPPT multiplies the physics tendencies by a random number between 0 and 2 before updating the model state. This addresses errors in the physics parameterizations (either missing physics or unresolved subgrid processes). It is most active in the boundary layer and convective regions. Valid values: ``True`` | ``False`` + +``ISEED_SPPT``: (Default: 1) + Seed for setting the SPPT random number sequence. + +``SPPT_MAG``: (Default: 0.7) + Amplitude of random patterns. Corresponds to the variable ``sppt`` in ``input.nml``. + +``SPPT_LOGIT``: (Default: true) + Limits the SPPT perturbations to between 0 and 2. Should be "TRUE"; otherwise the model will crash. + +``SPPT_LSCALE``: (Default: 150000) + Decorrelation spatial scale in meters. + +``SPPT_TSCALE``: (Default: 21600) + Decorrelation timescale in seconds. Corresponds to the variable ``sppt_tau`` in ``input.nml``. + +``SPPT_INT``: (Default: 3600) + Interval in seconds to update the random pattern (optional parameter). Perturbations still get applied at every time-step. Corresponds to the variable ``spptint`` in ``input.nml``. + +``SPPT_SFCLIMIT``: (Default: true) + When true, tapers the SPPT perturbations to zero at the model's lowest level, which reduces model crashes. + +``USE_ZMTNBLCK``: (Default: false) + When true, perturbations are not applied below the dividing streamline that is diagnosed by the gravity wave drag, mountain blocking scheme. Valid values: ``True`` | ``False`` + + +Stochastic Kinetic Energy Backscatter (SKEB) Parameters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +``DO_SKEB``: (Default: false) + Flag to turn Stochastic Kinetic Energy Backscatter (SKEB) on or off. SKEB adds wind perturbations to the model state. Perturbations are random in space/time, but their amplitude is determined by a smoothed dissipation estimate provided by the :term:`dynamical core`. SKEB addresses errors in the dynamics that are more active in the mid-latitudes. Valid values: ``True`` | ``False`` + +``ISEED_SKEB``: (Default: 3) + Seed for setting the SKEB random number sequence. + +``SKEB_MAG``: (Default: 0.5) + Amplitude of random patterns. Corresponds to the variable ``skeb`` in ``input.nml``. + +``SKEB_LSCALE``: (Default: 150000) + Decorrelation spatial scale in meters. + +``SKEB_TSCALE``: (Default: 21600) + Decorrelation timescale in seconds. Corresponds to the variable ``skeb_tau`` in ``input.nml``. + +``SKEB_INT``: (Default: 3600) + Interval in seconds to update the random pattern (optional). Perturbations still get applied at every time-step. Corresponds to the variable ``skebint`` in ``input.nml``. + +``SKEBNORM``: (Default: 1) + Patterns: + * 0-random pattern is stream function + * 1-pattern is K.E. norm + * 2-pattern is vorticity + +``SKEB_VDOF``: (Default: 10) + The number of degrees of freedom in the vertical direction for the SKEB random pattern. + + +.. _SPP: + +Parameters for Stochastically Perturbed Parameterizations (SPP) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +SPP perturbs specific tuning parameters within a physics :term:`parameterization ` (unlike :ref:`SPPT `, which multiplies overall physics tendencies by a random perturbation field *after* the call to the physics suite). Patterns evolve and are applied at each time step. Each SPP option is an array, applicable (in order) to the :term:`RAP`/:term:`HRRR`-based parameterizations listed in ``SPP_VAR_LIST``. Enter each value of the array in ``config.yaml`` as shown below without commas or single quotes (e.g., ``SPP_VAR_LIST: [ "pbl" "sfc" "mp" "rad" "gwd" ]`` ). Both commas and single quotes will be added by Jinja when creating the namelist.
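For illustration, a hedged sketch of an SPP block in the ``global:`` section of ``config.yaml``, written in the comma-free array style described above (the parameter names are documented below; the values simply echo the documented defaults):

.. code-block:: yaml

   global:
     DO_SPP: true
     SPP_VAR_LIST: [ "pbl" "sfc" "mp" "rad" "gwd" ]   # schemes to perturb, in order
     SPP_MAG_LIST: [ 0.2 0.2 0.75 0.2 0.2 ]           # perturbation magnitude per scheme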
.. note:: + SPP is currently only available for specific physics schemes used in the RAP/HRRR physics suite. Users need to be aware of which :term:`SDF` is chosen when turning this option on. Of the four supported physics suites, the full set of parameterizations can only be used with the ``FV3_HRRR`` option for ``CCPP_PHYS_SUITE``. + +``DO_SPP``: (Default: false) + Flag to turn SPP on or off. SPP perturbs parameters or variables with unknown or uncertain magnitudes within the physics code based on ranges provided by physics experts. Valid values: ``True`` | ``False`` + +``ISEED_SPP``: (Default: [ 4, 5, 6, 7, 8 ] ) + Seed for setting the random number sequence for the perturbation pattern. + +``SPP_VAR_LIST``: (Default: [ "pbl", "sfc", "mp", "rad", "gwd" ] ) + The list of parameterizations to perturb: planetary boundary layer (PBL), surface physics (SFC), microphysics (MP), radiation (RAD), and gravity wave drag (GWD). Valid values: ``"pbl"`` | ``"sfc"`` | ``"rad"`` | ``"gwd"`` | ``"mp"`` + +``SPP_MAG_LIST``: (Default: [ 0.2, 0.2, 0.75, 0.2, 0.2 ] ) + SPP perturbation magnitudes used in each parameterization. Corresponds to the variable ``spp_prt_list`` in ``input.nml``. + +``SPP_LSCALE``: (Default: [ 150000.0, 150000.0, 150000.0, 150000.0, 150000.0 ] ) + Decorrelation spatial scales in meters. + +``SPP_TSCALE``: (Default: [ 21600.0, 21600.0, 21600.0, 21600.0, 21600.0 ] ) + Decorrelation timescales in seconds. Corresponds to the variable ``spp_tau`` in ``input.nml``. + +``SPP_SIGTOP1``: (Default: [ 0.1, 0.1, 0.1, 0.1, 0.1 ] ) + Controls vertical tapering of perturbations at the tropopause and corresponds to the lower sigma level at which to taper perturbations to zero. + +``SPP_SIGTOP2``: (Default: [ 0.025, 0.025, 0.025, 0.025, 0.025 ] ) + Controls vertical tapering of perturbations at the tropopause and corresponds to the upper sigma level at which to taper perturbations to zero. + +``SPP_STDDEV_CUTOFF``: (Default: [ 1.5, 1.5, 2.5, 1.5, 1.5 ] ) + Limit for possible perturbation values in standard deviations from the mean. + + +Land Surface Model (LSM) SPP +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Land surface perturbations can be applied to land model parameters and land model prognostic variables. The LSM scheme is intended to address errors in the land model and land-atmosphere interactions. LSM perturbations include soil moisture content (SMC) (volume fraction), vegetation fraction (VGF), albedo (ALB), salinity (SAL), emissivity (EMI), surface roughness (ZOL) (in cm), and soil temperature (STC). Perturbations to soil moisture content (SMC) are only applied at the first time step. Only five perturbations at a time can currently be applied, but all seven are shown below. In addition, only one unique *iseed* value is allowed at the moment, and it is used for each pattern. + +The parameters below turn on SPP in the Noah or RUC LSM (support for Noah MP is in progress). Please be aware of the :term:`SDF` that you choose if you wish to turn on Land Surface Model (LSM) SPP. SPP in LSM schemes is handled in the ``&nam_sfcperts`` namelist block instead of in ``&nam_sppperts``, where all other SPP is implemented. + +``DO_LSM_SPP``: (Default: false) + Turns on Land Surface Model (LSM) Stochastically Perturbed Parameterizations (SPP). When true, sets ``lndp_type=2``, which applies land perturbations to the selected parameters using a newer scheme designed for data assimilation (DA) ensemble spread.
LSM SPP perturbs uncertain land surface fields ("smc" "vgf" "alb" "sal" "emi" "zol" "stc") based on recommendations from physics experts. Valid values: ``True`` | ``False`` + +``LSM_SPP_TSCALE``: (Default: [ 21600, 21600, 21600, 21600, 21600, 21600, 21600 ] ) + Decorrelation timescales in seconds. + +``LSM_SPP_LSCALE``: (Default: [ 150000, 150000, 150000, 150000, 150000, 150000, 150000 ] ) + Decorrelation spatial scales in meters. + +``ISEED_LSM_SPP``: (Default: [ 9 ] ) + Seed to initialize the random perturbation pattern. + +``LSM_SPP_VAR_LIST``: (Default: [ "smc", "vgf", "alb", "sal", "emi", "zol", "stc" ] ) + Indicates which LSM variables to perturb. + +``LSM_SPP_MAG_LIST``: (Default: [ 0.017, 0.001, 0.001, 0.001, 0.001, 0.001, 0.2 ] ) + Sets the maximum random pattern amplitude for each of the LSM perturbations. + +.. _HaloBlend: + +Halo Blend Parameter +------------------------ +``HALO_BLEND``: (Default: 10) + Number of cells to use for "blending" the external solution (obtained from the :term:`LBCs`) with the internal solution from the FV3LAM :term:`dycore`. Specifically, it refers to the number of rows into the computational domain that should be blended with the LBCs. Cells at which blending occurs are all within the boundary of the native grid; they don't involve the 4 cells outside the boundary where the LBCs are specified (which is a different :term:`halo`). Blending is necessary to smooth out waves generated due to mismatch between the external and internal solutions. To shut :term:`halo` blending off, set this to zero. diff --git a/docs/UsersGuide/source/ContainerQuickstart.rst b/docs/UsersGuide/source/ContainerQuickstart.rst index 229db0a6eb..a83d87d631 100644 --- a/docs/UsersGuide/source/ContainerQuickstart.rst +++ b/docs/UsersGuide/source/ContainerQuickstart.rst @@ -4,14 +4,14 @@ Container-Based Quick Start Guide ==================================== -This Container-Based Quick Start Guide will help users build and run the "out-of-the-box" case for the Unified Forecast System (:term:`UFS`) Short-Range Weather (SRW) Application using a `Singularity `__ container. The :term:`container` approach provides a uniform enviroment in which to build and run the SRW App. Normally, the details of building and running the SRW App vary from system to system due to the many possible combinations of operating systems, compilers, :term:`MPI`'s, and package versions available. Installation via Singularity container reduces this variability and allows for a smoother SRW App build experience. Normally, containers can only run on a single compute node and are not compatible with the `Rocoto workflow manager `__, so users must run each task in the workflow manually. However, the Singularity container described in this chapter has been adapted such that it is able to run across multiple nodes using Rocoto. This makes it an excellent starting point for beginners. The :ref:`non-container approach ` may still be more appropriate for users who desire additional customizability, particularly if they already have experience running the SRW App. +This Container-Based Quick Start Guide will help users build and run the "out-of-the-box" case for the Unified Forecast System (:term:`UFS`) Short-Range Weather (SRW) Application using a `Singularity `__ container. The :term:`container` approach provides a uniform enviroment in which to build and run the SRW App. 
Normally, the details of building and running the SRW App vary from system to system due to the many possible combinations of operating systems, compilers, :term:`MPIs `, and package versions available. Installation via Singularity container reduces this variability and allows for a smoother SRW App build experience. Normally, containers can only run on a single compute node and are not compatible with the `Rocoto workflow manager `__, so users must run each task in the workflow manually. However, the Singularity container described in this chapter has been adapted such that it is able to run across multiple nodes using Rocoto. This makes it an excellent starting point for beginners. The :ref:`non-container build approach ` may still be more appropriate for users who desire additional customizability, particularly if they already have experience running the SRW App. The "out-of-the-box" SRW App case described in this User's Guide builds a weather forecast for June 15-16, 2019. Multiple convective weather events during these two days produced over 200 filtered storm reports. Severe weather was clustered in two areas: the Upper Midwest through the Ohio Valley and the Southern Great Plains. This forecast uses a predefined 25-km Continental United States (:term:`CONUS`) grid (RRFS_CONUS_25km), the Global Forecast System (:term:`GFS`) version 16 physics suite (FV3_GFS_v16 :term:`CCPP`), and :term:`FV3`-based GFS raw external model data for initialization. .. attention:: * The SRW Application has `four levels of support `__. The steps described in this chapter will work most smoothly on preconfigured (Level 1) systems. However, this guide can serve as a starting point for running the SRW App on other systems, too. - * This chapter of the User's Guide should **only** be used for container builds. For non-container builds, see :numref:`Chapter %s ` for a Quick Start Guide or :numref:`Chapter %s ` for a detailed guide to building the SRW App **without** a container. + * This chapter of the User's Guide should **only** be used for container builds. For non-container builds, see :numref:`Chapter %s ` for a Quick Start Guide or :numref:`Chapter %s ` for a detailed guide to building the SRW App **without** a container. .. _DownloadCodeC: @@ -28,7 +28,7 @@ Users must have an **Intel** compiler and :term:`MPI` (available for free `here Install Singularity ^^^^^^^^^^^^^^^^^^^^^^^ -To build and run the SRW App using a Singularity container, first install the Singularity package according to the `Singularity Installation Guide `__. This will include the installation of dependencies and the installation of the Go programming language. SingularityCE Version 3.7 or above is recommended. +To build and run the SRW App using a Singularity container, first install the Singularity package according to the `Singularity Installation Guide `__. This will include the installation of dependencies and the installation of the Go programming language. SingularityCE Version 3.7 or above is recommended. .. warning:: Docker containers can only be run with root privileges, and users cannot have root privileges on :term:`HPCs `. Therefore, it is not possible to build the SRW App, which uses the HPC-Stack, inside a Docker container on an HPC system. However, a Singularity image may be built directly from a Docker image for use on the system. @@ -124,11 +124,11 @@ On non-Level 1 systems, users should build the container in a writable sandbox: Some users may prefer to issue the command without the ``sudo`` prefix. 
Whether ``sudo`` is required is system-dependent. .. note:: - Users can choose to build a release version of the container (SRW App v2.0.0) using a similar command: + Users can choose to build a release version of the container (SRW App v2.1.0) using a similar command: .. code-block:: console - sudo singularity build --sandbox ubuntu20.04-intel-srwapp docker://noaaepic/ubuntu20.04-intel22-ufs-srwapp:release-public-v2 + sudo singularity build --sandbox ubuntu20.04-intel-srwapp docker://noaaepic/ubuntu20.04-intel-srwapp:release-public-v2.1.0 .. _WorkOnHPC: @@ -210,7 +210,7 @@ Users can run ``exit`` to exit the shell. Download and Stage the Data ============================ -The SRW App requires input files to run. These include static datasets, initial and boundary condition files, and model configuration files. On Level 1 systems, the data required to run SRW App tests are already available as long as the bind argument (starting with ``-B``) in :numref:`Step %s ` included the directory with the input model data. For Level 2-4 systems, the data must be added manually by the user. Detailed instructions on how to add the data can be found in :numref:`Section %s `. Sections :numref:`%s ` and :numref:`%s ` contain useful background information on the input and output files used in the SRW App. +The SRW App requires input files to run. These include static datasets, initial and boundary condition files, and model configuration files. On Level 1 systems, the data required to run SRW App tests are already available as long as the bind argument (starting with ``-B``) in :numref:`Step %s ` included the directory with the input model data. See :numref:`Table %s ` for Level 1 data locations. For Level 2-4 systems, the data must be added manually by the user. Detailed instructions on how to add the data can be found in :numref:`Section %s `. Sections :numref:`%s ` and :numref:`%s ` contain useful background information on the input and output files used in the SRW App. .. _GenerateForecastC: @@ -246,8 +246,8 @@ To activate the regional workflow, run the following commands: where: - * ```` is replaced with the actual path to the modulefiles on the user's system (often ``$PWD/modulefiles``), and - * ```` is a valid, lowercased machine/platform name (see the ``MACHINE`` variable in :numref:`Section %s `). + * ```` is replaced with the actual path to the modulefiles on the user's local system (often ``$PWD/modulefiles``), and + * ```` is a valid, lowercased machine/platform name (see the ``MACHINE`` variable in :numref:`Section %s `). The ``wflow_`` modulefile will then output instructions to activate the regional workflow. The user should run the commands specified in the modulefile output. For example, if the output says: @@ -273,7 +273,7 @@ where: * ``-c`` indicates the compiler on the user's local machine (e.g., ``intel/2022.1.2``) * ``-m`` indicates the :term:`MPI` on the user's local machine (e.g., ``impi/2022.1.2``) - * ```` refers to the local machine (e.g., ``hera``, ``jet``, ``noaacloud``, ``mac``). See ``MACHINE`` in :numref:`Section %s ` for a full list of options. + * ```` refers to the local machine (e.g., ``hera``, ``jet``, ``noaacloud``, ``mac``). See ``MACHINE`` in :numref:`Section %s ` for a full list of options. * ``-i`` indicates the name of the container image that was built in :numref:`Step %s ` (``ubuntu20.04-intel-srwapp`` or ``ubuntu20.04-intel-srwapp-develop.img`` by default). 
For example, on Hera, the command would be: @@ -299,7 +299,7 @@ From here, users can follow the steps below to configure the out-of-the-box SRW The default settings include a predefined 25-km :term:`CONUS` grid (RRFS_CONUS_25km), the :term:`GFS` v16 physics suite (FV3_GFS_v16 :term:`CCPP`), and :term:`FV3`-based GFS raw external model data for initialization. - #. Edit the ``MACHINE`` and ``ACCOUNT`` variables in the ``user:`` section of ``config.yaml``. See :numref:`Section %s ` for details on valid values. + #. Edit the ``MACHINE`` and ``ACCOUNT`` variables in the ``user:`` section of ``config.yaml``. See :numref:`Section %s ` for details on valid values. .. note:: @@ -310,7 +310,7 @@ From here, users can follow the steps below to configure the out-of-the-box SRW .. code-block:: console USE_CRON_TO_RELAUNCH: TRUE - CRON_RELAUNCH_INTVL_MNTS: 02 + CRON_RELAUNCH_INTVL_MNTS: 3 There are instructions for running the experiment via additional methods in :numref:`Section %s `. However, this technique (automation via :term:`crontab`) is the simplest option. @@ -326,7 +326,7 @@ From here, users can follow the steps below to configure the out-of-the-box SRW EXTRN_MDL_FILES_ICS: [] EXTRN_MDL_DATA_STORES: disk - On other systems, users will need to change the path for ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_FILES_LBCS`` (below) to reflect the location of the system's data. The location of the machine's global data can be viewed :ref:`here ` for Level 1 systems. Alternatively, the user can add the path to their local data if they downloaded it as described in :numref:`Section %s `. + On other systems, users will need to change the path for ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_FILES_LBCS`` (below) to reflect the location of the system's data. The location of the machine's global data can be viewed :ref:`here ` for Level 1 systems. Alternatively, the user can add the path to their local data if they downloaded it as described in :numref:`Section %s `. #. Edit the ``task_get_extrn_lbcs:`` section of the ``config.yaml`` to include the correct data paths to the lateral boundary conditions files. For example, on Hera, add: @@ -351,7 +351,7 @@ Run the following command to generate the workflow: .. code-block:: console - python generate_FV3LAM_wflow.py + ./generate_FV3LAM_wflow.py This workflow generation script creates an experiment directory and populates it with all the data needed to run through the workflow. The last line of output from this script should start with ``*/3 * * * *`` (or similar). @@ -387,8 +387,4 @@ where: New Experiment =============== -To run a new experiment in the container at a later time, users will need to rerun the commands in :numref:`Section %s ` to reactivate the regional workflow. Then, users can configure a new experiment by updating the environment variables in ``config.yaml`` to reflect the desired experiment configuration. Basic instructions appear in :numref:`Section %s ` above, and detailed instructions can be viewed in :numref:`Section %s `. After adjusting the configuration file, regenerate the experiment by running ``python generate_FV3LAM_wflow.py``. - -Plot the Output -=============== -Two python scripts are provided to generate plots from the FV3-LAM post-processed GRIB2 output. Information on how to generate the graphics can be found in :numref:`Chapter %s `. +To run a new experiment in the container at a later time, users will need to rerun the commands in :numref:`Section %s ` to reactivate the regional workflow. 
Then, users can configure a new experiment by updating the environment variables in ``config.yaml`` to reflect the desired experiment configuration. Basic instructions appear in :numref:`Section %s ` above, and detailed instructions can be viewed in :numref:`Section %s `. After adjusting the configuration file, regenerate the experiment by running ``./generate_FV3LAM_wflow.py``. diff --git a/docs/UsersGuide/source/ContributorsGuide.rst b/docs/UsersGuide/source/ContributorsGuide.rst deleted file mode 100644 index f63bf3d447..0000000000 --- a/docs/UsersGuide/source/ContributorsGuide.rst +++ /dev/null @@ -1,465 +0,0 @@ - -.. _ContributorsGuide: - -============================== -SRW App Contributor's Guide -============================== - -.. _Background: - -Background -=========== - -Authoritative branch ------------------------ - -The ``ufs-srweather-app`` and ``regional_workflow`` repositories each maintain a main branch for development called ``develop``. The HEAD of ``develop`` reflects the latest development changes. It points to regularly updated hashes for individual sub-components. Pull requests (PRs) will be merged to ``develop``. - -The ``develop`` branch is protected by the code management team: - #. Pull requests for this branch require approval by at least two code reviewers. - #. A code manager should perform at least one of the reviews and the merge, but other contributors are welcome to provide comments/suggestions. - - -Code Management Team --------------------------- - -Scientists from across multiple labs and organizations have volunteered to review pull requests for the ``develop`` branch: - -.. table:: - - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | **Organization** | **Reviewers** | **Areas of Expertise** | - +==================+================================================+=============================================================================================+ - | EMC | Chan-Hoo Jeon (@chan-hoo) | Workflow, Operational platform testing (WCOSS/NCO), and Air quality modeling (Online-CMAQ) | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Ben Blake (@BenjaminBlake-NOAA) | Output visualization, Rocoto | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Ratko Vasic (@RatkoVasic-NOAA) | Workflow, NCO requirements, and operational platform testing | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | EPIC | Mark Potts (@mark-a-potts) | HPC systems | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Jong Kim (@jkbk2004) | UFS Weather Model configuration, forecast sensitivity analysis, data assimilation | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Michael Lueken (@MichaelLueken) | SRW App code management | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Natalie Perlin (@natalie-perlin) | Generic Linux/Mac 
installations, hpc-stack/spack-stack | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Gillian Petro (@gspetro-NOAA) | Documentation | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Edward Snyder (@EdwardSnyder-NOAA) | WE2E testing, input data | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | GLERL | David Wright (@dmwright526) | FVCOM integration, output visualization, preprocessing tasks | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | GSL | Jeff Beck (@JeffBeck-NOAA) | SRW App configuration/workflow, code management, meteorological evaluation | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Gerard Ketefian (@gsketefian) | ``regional_workflow`` scripts, jinja templates, and verification tasks | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Linlin Pan (@panll) | Workflow, CCPP/physics, verification | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Christina Holt (@christinaholtNOAA) | Workflow, conda environment support, testing, and code management | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Christopher Harrop (@christopherwharrop-noaa) | Rocoto, code management, and testing | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Daniel Abdi (@danielabdi-noaa) | Workflow generation, testing RRFS on the cloud, environment modules | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | NCAR | Mike Kavulich (@mkavulich) | CCPP/physics | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Will Mayfield (@willmayfield) | Verification/METplus tasks, regional_workflow (esp. on Cheyenne) | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | NSSL | Yunheng Wang (@ywangwof) | HPC systems, code management and regional workflow especially on Stampede, Jet | - | | | and NSSL computers | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - -.. _ContribProcess: - -Contribution Process -======================== - -The steps below should be followed in order to make changes to the ``develop`` branch of the ``ufs-srweather-app`` or ``regional_workflow`` repositories. Communication with code managers and the code management team throughout the process is encouraged. - - #. 
**Issue** - Open an issue to document changes. Click `here `__ to open a new ``ufs-srweather-app`` issue or see :numref:`Step %s ` for detailed instructions. - #. **GitFlow** - Follow `GitFlow `__ procedures for development. - #. **Fork the repository** - Read more `here `__ about forking in GitHub. - #. **Create a branch** - Create a branch in your fork of the authoritative repository. Follow `GitFlow `__ conventions when creating the branch. All development should take place on a branch, *not* on ``develop``. Branches should be named as follows, where [name] is a one-word description of the branch: - - * **bugfix/[name]:** Fixes a demonstrably incorrect portion of code - * **feature/[name]:** Adds a new feature to the code or improves an existing portion of the code - * **text/[name]:** Changes elements of the repository that do not impact program output or log files (e.g., changes to README, documentation, comments, changing quoted Registry elements, white space alignment). Any change that does not impact the compiled code in any way should fall under this category. - - #. **Development** - Perform and test changes in the branch (not on ``develop``!). Document work in the issue and mention the issue number in commit messages to link your work to the issue (e.g., ``commit -m "Issue #23 - "``). Test code modifications on as many platforms as possible, and request help with further testing from the code management team when unable to test on all Level 1 platforms. Document changes to the workflow and capabilities in the ``.rst`` files so that the SRW App documentation stays up-to-date. - #. **Pull request** - When ready to merge changes back to the ``develop`` branch, the code developer should initiate a pull request (PR) of the feature branch into the ``develop`` branch. Read `here `__ about pull requests in GitHub. When a PR is initiated, the :ref:`PR Template